Commit | Line | Data |
---|---|---|
62d6a022 | 1 | /* Analyze RTL for GNU compiler. |
d353bf18 | 2 | Copyright (C) 1987-2015 Free Software Foundation, Inc. |
635aff97 | 3 | |
f12b58b3 | 4 | This file is part of GCC. |
635aff97 | 5 | |
f12b58b3 | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
f12b58b3 | 9 | version. |
635aff97 | 10 | |
f12b58b3 | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
635aff97 | 15 | |
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
635aff97 | 19 | |
20 | ||
21 | #include "config.h" | |
405711de | 22 | #include "system.h" |
805e22b2 | 23 | #include "coretypes.h" |
24 | #include "tm.h" | |
0b205f4c | 25 | #include "diagnostic-core.h" |
b8011969 | 26 | #include "hard-reg-set.h" |
b2528b76 | 27 | #include "rtl.h" |
aee989f5 | 28 | #include "insn-config.h" |
29 | #include "recog.h" | |
26619827 | 30 | #include "target.h" |
31 | #include "output.h" | |
77ec0c64 | 32 | #include "tm_p.h" |
350b17ef | 33 | #include "flags.h" |
67d6c12b | 34 | #include "regs.h" |
a3020f2f | 35 | #include "hashtab.h" |
36 | #include "hash-set.h" | |
37 | #include "vec.h" | |
a3020f2f | 38 | #include "input.h" |
d263732c | 39 | #include "function.h" |
94ea8568 | 40 | #include "predict.h" |
41 | #include "basic-block.h" | |
3072d30e | 42 | #include "df.h" |
b20a8bb4 | 43 | #include "symtab.h" |
b20a8bb4 | 44 | #include "inchash.h" |
e0ab7256 | 45 | #include "tree.h" |
06f9d6ef | 46 | #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */ |
1efe9e9d | 47 | #include "addresses.h" |
69924e56 | 48 | #include "rtl-iter.h" |
635aff97 | 49 | |
99b86c05 | 50 | /* Forward declarations */ |
81a410b1 | 51 | static void set_of_1 (rtx, const_rtx, void *); |
dd9b9fc5 | 52 | static bool covers_regno_p (const_rtx, unsigned int); |
53 | static bool covers_regno_no_parallel_p (const_rtx, unsigned int); | |
dd9b9fc5 | 54 | static int computed_jump_p_1 (const_rtx); |
81a410b1 | 55 | static void parms_set (rtx, const_rtx, void *); |
ca6d6e84 | 56 | |
3754d046 | 57 | static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode, |
58 | const_rtx, machine_mode, | |
d263732c | 59 | unsigned HOST_WIDE_INT); |
3754d046 | 60 | static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode, |
61 | const_rtx, machine_mode, | |
d263732c | 62 | unsigned HOST_WIDE_INT); |
3754d046 | 63 | static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx, |
64 | machine_mode, | |
d263732c | 65 | unsigned int); |
3754d046 | 66 | static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx, |
67 | machine_mode, unsigned int); | |
d263732c | 68 | |
69924e56 | 69 | rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE]; |
70 | rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE]; | |
71 | ||
4956440a | 72 | /* Truncation narrows the mode from SOURCE mode to DESTINATION mode. |
73 | If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is | |
74 | SIGN_EXTEND then while narrowing we also have to enforce the | |
75 | representation and sign-extend the value to mode DESTINATION_REP. | |
76 | ||
77 | If the value is already sign-extended to DESTINATION_REP mode we | |
78 | can just switch to DESTINATION mode on it. For each pair of | |
79 | integral modes SOURCE and DESTINATION, when truncating from SOURCE | |
80 | to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION] | |
81 | contains the number of high-order bits in SOURCE that have to be | |
82 | copies of the sign-bit so that we can do this mode-switch to | |
83 | DESTINATION. */ | |
84 | ||
85 | static unsigned int | |
86 | num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1]; | |
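To make the table's meaning concrete, a hedged illustration follows; the mode names and bit widths are assumptions about a hypothetical target, not something defined in this file.

```c
/* Hypothetical target: HImode is 16 bits, SImode is 32 bits, and
   TARGET_MODE_REP_EXTENDED (HImode, SImode) returns SIGN_EXTEND.
   Truncating an SImode value to HImode is then just a mode switch
   when all SImode bits above the HImode sign bit are already copies
   of that sign bit; num_sign_bit_copies_in_rep[SImode][HImode]
   records how many high-order bits that requirement covers.  */
```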
635aff97 | 87 | \f |
69924e56 | 88 | /* Store X into index I of ARRAY. ARRAY is known to have at least I |
89 | elements. Return the new base of ARRAY. */ | |
90 | ||
91 | template <typename T> | |
92 | typename T::value_type * | |
93 | generic_subrtx_iterator <T>::add_single_to_queue (array_type &array, | |
94 | value_type *base, | |
95 | size_t i, value_type x) | |
96 | { | |
97 | if (base == array.stack) | |
98 | { | |
99 | if (i < LOCAL_ELEMS) | |
100 | { | |
101 | base[i] = x; | |
102 | return base; | |
103 | } | |
104 | gcc_checking_assert (i == LOCAL_ELEMS); | |
4f7f3b39 | 105 | /* A previous iteration might also have moved from the stack to the |
106 | heap, in which case the heap array will already be big enough. */ | |
107 | if (vec_safe_length (array.heap) <= i) | |
108 | vec_safe_grow (array.heap, i + 1); | |
69924e56 | 109 | base = array.heap->address (); |
110 | memcpy (base, array.stack, sizeof (array.stack)); | |
111 | base[LOCAL_ELEMS] = x; | |
112 | return base; | |
113 | } | |
114 | unsigned int length = array.heap->length (); | |
115 | if (length > i) | |
116 | { | |
117 | gcc_checking_assert (base == array.heap->address ()); | |
118 | base[i] = x; | |
119 | return base; | |
120 | } | |
121 | else | |
122 | { | |
123 | gcc_checking_assert (i == length); | |
124 | vec_safe_push (array.heap, x); | |
125 | return array.heap->address (); | |
126 | } | |
127 | } | |
128 | ||
129 | /* Add the subrtxes of X to worklist ARRAY, starting at END. Return the | |
130 | number of elements added to the worklist. */ | |
131 | ||
132 | template <typename T> | |
133 | size_t | |
134 | generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array, | |
135 | value_type *base, | |
136 | size_t end, rtx_type x) | |
137 | { | |
e43f6e8c | 138 | enum rtx_code code = GET_CODE (x); |
139 | const char *format = GET_RTX_FORMAT (code); | |
69924e56 | 140 | size_t orig_end = end; |
e43f6e8c | 141 | if (__builtin_expect (INSN_P (x), false)) |
142 | { | |
143 | /* Put the pattern at the top of the queue, since that's what | |
144 | we're likely to want most. It also allows for the SEQUENCE | |
145 | code below. */ | |
146 | for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i) | |
147 | if (format[i] == 'e') | |
148 | { | |
149 | value_type subx = T::get_value (x->u.fld[i].rt_rtx); | |
150 | if (__builtin_expect (end < LOCAL_ELEMS, true)) | |
151 | base[end++] = subx; | |
152 | else | |
153 | base = add_single_to_queue (array, base, end++, subx); | |
154 | } | |
155 | } | |
156 | else | |
157 | for (int i = 0; format[i]; ++i) | |
158 | if (format[i] == 'e') | |
159 | { | |
160 | value_type subx = T::get_value (x->u.fld[i].rt_rtx); | |
161 | if (__builtin_expect (end < LOCAL_ELEMS, true)) | |
162 | base[end++] = subx; | |
163 | else | |
164 | base = add_single_to_queue (array, base, end++, subx); | |
165 | } | |
166 | else if (format[i] == 'E') | |
167 | { | |
168 | unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec); | |
169 | rtx *vec = x->u.fld[i].rt_rtvec->elem; | |
170 | if (__builtin_expect (end + length <= LOCAL_ELEMS, true)) | |
171 | for (unsigned int j = 0; j < length; j++) | |
172 | base[end++] = T::get_value (vec[j]); | |
173 | else | |
174 | for (unsigned int j = 0; j < length; j++) | |
175 | base = add_single_to_queue (array, base, end++, | |
176 | T::get_value (vec[j])); | |
177 | if (code == SEQUENCE && end == length) | |
178 | /* If the subrtxes of the sequence fill the entire array then | |
179 | we know that no other parts of a containing insn are queued. | |
180 | The caller is therefore iterating over the sequence as a | |
181 | PATTERN (...), so we also want the patterns of the | |
182 | subinstructions. */ | |
183 | for (unsigned int j = 0; j < length; j++) | |
184 | { | |
185 | typename T::rtx_type x = T::get_rtx (base[j]); | |
186 | if (INSN_P (x)) | |
187 | base[j] = T::get_value (PATTERN (x)); | |
188 | } | |
189 | } | |
69924e56 | 190 | return end - orig_end; |
191 | } | |
192 | ||
193 | template <typename T> | |
194 | void | |
195 | generic_subrtx_iterator <T>::free_array (array_type &array) | |
196 | { | |
197 | vec_free (array.heap); | |
198 | } | |
199 | ||
200 | template <typename T> | |
201 | const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS; | |
202 | ||
203 | template class generic_subrtx_iterator <const_rtx_accessor>; | |
204 | template class generic_subrtx_iterator <rtx_var_accessor>; | |
205 | template class generic_subrtx_iterator <rtx_ptr_accessor>; | |
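For orientation, a minimal sketch of how these iterator classes are normally used, via the FOR_EACH_SUBRTX macros from rtl-iter.h; the helper name contains_mem_p is hypothetical and not part of this file.

```c
/* Return true if X contains a MEM anywhere among its subrtxes.  */
static bool
contains_mem_p (const_rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      return true;
  return false;
}
```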
206 | ||
635aff97 | 207 | /* Return 1 if the value of X is unstable |
208 | (would be different at a different point in the program). | |
209 | The frame pointer, arg pointer, etc. are considered stable | |
210 | (within one function) and so is anything marked `unchanging'. */ | |
211 | ||
212 | int | |
dd9b9fc5 | 213 | rtx_unstable_p (const_rtx x) |
635aff97 | 214 | { |
dd9b9fc5 | 215 | const RTX_CODE code = GET_CODE (x); |
19cb6b50 | 216 | int i; |
217 | const char *fmt; | |
635aff97 | 218 | |
a3c6603a | 219 | switch (code) |
220 | { | |
221 | case MEM: | |
b04fab2a | 222 | return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0)); |
635aff97 | 223 | |
a3c6603a | 224 | case CONST: |
0349edce | 225 | CASE_CONST_ANY: |
a3c6603a | 226 | case SYMBOL_REF: |
227 | case LABEL_REF: | |
228 | return 0; | |
635aff97 | 229 | |
a3c6603a | 230 | case REG: |
231 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
d9c8e13d | 232 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
b8011969 | 233 | /* The arg pointer varies if it is not a fixed register. */ |
b04fab2a | 234 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
d9c8e13d | 235 | return 0; |
d9c8e13d | 236 | /* ??? When call-clobbered, the value is stable modulo the restore |
237 | that must happen after a call. This currently screws up local-alloc | |
238 | into believing that the restore is not needed. */ | |
260e669e | 239 | if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx) |
d9c8e13d | 240 | return 0; |
d9c8e13d | 241 | return 1; |
a3c6603a | 242 | |
243 | case ASM_OPERANDS: | |
244 | if (MEM_VOLATILE_P (x)) | |
245 | return 1; | |
246 | ||
d632b59a | 247 | /* Fall through. */ |
a3c6603a | 248 | |
249 | default: | |
250 | break; | |
251 | } | |
635aff97 | 252 | |
253 | fmt = GET_RTX_FORMAT (code); | |
254 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
255 | if (fmt[i] == 'e') | |
cac0c8c9 | 256 | { |
257 | if (rtx_unstable_p (XEXP (x, i))) | |
258 | return 1; | |
259 | } | |
260 | else if (fmt[i] == 'E') | |
261 | { | |
262 | int j; | |
263 | for (j = 0; j < XVECLEN (x, i); j++) | |
264 | if (rtx_unstable_p (XVECEXP (x, i, j))) | |
265 | return 1; | |
266 | } | |
267 | ||
635aff97 | 268 | return 0; |
269 | } | |
270 | ||
271 | /* Return 1 if X has a value that can vary even between two | |
272 | executions of the program. 0 means X can be compared reliably | |
273 | against certain constants or near-constants. | |
ea087693 | 274 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
275 | zero, we are slightly more conservative. | |
635aff97 | 276 | The frame pointer and the arg pointer are considered constant. */ |
277 | ||
52d07779 | 278 | bool |
279 | rtx_varies_p (const_rtx x, bool for_alias) | |
635aff97 | 280 | { |
70f5822c | 281 | RTX_CODE code; |
19cb6b50 | 282 | int i; |
283 | const char *fmt; | |
635aff97 | 284 | |
70f5822c | 285 | if (!x) |
286 | return 0; | |
287 | ||
288 | code = GET_CODE (x); | |
635aff97 | 289 | switch (code) |
290 | { | |
291 | case MEM: | |
b04fab2a | 292 | return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); |
002fe3cb | 293 | |
635aff97 | 294 | case CONST: |
0349edce | 295 | CASE_CONST_ANY: |
635aff97 | 296 | case SYMBOL_REF: |
297 | case LABEL_REF: | |
298 | return 0; | |
299 | ||
300 | case REG: | |
301 | /* Note that we have to test for the actual rtx used for the frame | |
302 | and arg pointers and not just the register number in case we have | |
303 | eliminated the frame and/or arg pointer and are using it | |
304 | for pseudos. */ | |
d9c8e13d | 305 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
b8011969 | 306 | /* The arg pointer varies if it is not a fixed register. */ |
307 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
d9c8e13d | 308 | return 0; |
ea087693 | 309 | if (x == pic_offset_table_rtx |
ea087693 | 310 | /* ??? When call-clobbered, the value is stable modulo the restore |
311 | that must happen after a call. This currently screws up | |
312 | local-alloc into believing that the restore is not needed, so we | |
313 | must return 0 only if we are called from alias analysis. */ | |
260e669e | 314 | && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias)) |
ea087693 | 315 | return 0; |
d9c8e13d | 316 | return 1; |
635aff97 | 317 | |
318 | case LO_SUM: | |
319 | /* The operand 0 of a LO_SUM is considered constant | |
f7cd7994 | 320 | (in fact it is related specifically to operand 1) |
321 | during alias analysis. */ | |
322 | return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias)) | |
323 | || rtx_varies_p (XEXP (x, 1), for_alias); | |
2617fe26 | 324 | |
a3c6603a | 325 | case ASM_OPERANDS: |
326 | if (MEM_VOLATILE_P (x)) | |
327 | return 1; | |
328 | ||
d632b59a | 329 | /* Fall through. */ |
a3c6603a | 330 | |
0dbd1c74 | 331 | default: |
332 | break; | |
635aff97 | 333 | } |
334 | ||
335 | fmt = GET_RTX_FORMAT (code); | |
336 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
337 | if (fmt[i] == 'e') | |
cac0c8c9 | 338 | { |
ea087693 | 339 | if (rtx_varies_p (XEXP (x, i), for_alias)) |
cac0c8c9 | 340 | return 1; |
341 | } | |
342 | else if (fmt[i] == 'E') | |
343 | { | |
344 | int j; | |
345 | for (j = 0; j < XVECLEN (x, i); j++) | |
ea087693 | 346 | if (rtx_varies_p (XVECEXP (x, i, j), for_alias)) |
cac0c8c9 | 347 | return 1; |
348 | } | |
349 | ||
635aff97 | 350 | return 0; |
351 | } | |
352 | ||
be8108ee | 353 | /* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE |
354 | bytes can cause a trap. MODE is the mode of the MEM (not that of X) and | |
355 | UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory | |
356 | references on strict alignment machines. */ | |
635aff97 | 357 | |
1aecae7f | 358 | static int |
0eee494e | 359 | rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size, |
3754d046 | 360 | machine_mode mode, bool unaligned_mems) |
635aff97 | 361 | { |
19cb6b50 | 362 | enum rtx_code code = GET_CODE (x); |
635aff97 | 363 | |
be8108ee | 364 | /* The offset must be a multiple of the mode size if we are considering |
365 | unaligned memory references on strict alignment machines. */ | |
366 | if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0) | |
0eee494e | 367 | { |
368 | HOST_WIDE_INT actual_offset = offset; | |
be8108ee | 369 | |
0eee494e | 370 | #ifdef SPARC_STACK_BOUNDARY_HACK |
371 | /* ??? The SPARC port may claim a STACK_BOUNDARY higher than | |
372 | the real alignment of %sp. However, when it does this, the | |
373 | alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | |
374 | if (SPARC_STACK_BOUNDARY_HACK | |
375 | && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx)) | |
376 | actual_offset -= STACK_POINTER_OFFSET; | |
377 | #endif | |
378 | ||
99e9b19f | 379 | if (actual_offset % GET_MODE_SIZE (mode) != 0) |
380 | return 1; | |
0eee494e | 381 | } |
382 | ||
635aff97 | 383 | switch (code) |
384 | { | |
385 | case SYMBOL_REF: | |
0eee494e | 386 | if (SYMBOL_REF_WEAK (x)) |
387 | return 1; | |
388 | if (!CONSTANT_POOL_ADDRESS_P (x)) | |
389 | { | |
390 | tree decl; | |
391 | HOST_WIDE_INT decl_size; | |
392 | ||
393 | if (offset < 0) | |
394 | return 1; | |
395 | if (size == 0) | |
396 | size = GET_MODE_SIZE (mode); | |
397 | if (size == 0) | |
398 | return offset != 0; | |
399 | ||
400 | /* If the size of the access or of the symbol is unknown, | |
401 | assume the worst. */ | |
402 | decl = SYMBOL_REF_DECL (x); | |
403 | ||
404 | /* Else check that the access is in bounds. TODO: restructure | |
92ddcd97 | 405 | expr_size/tree_expr_size/int_expr_size and just use the latter. */ |
0eee494e | 406 | if (!decl) |
407 | decl_size = -1; | |
408 | else if (DECL_P (decl) && DECL_SIZE_UNIT (decl)) | |
e913b5cd | 409 | decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl)) |
410 | ? tree_to_shwi (DECL_SIZE_UNIT (decl)) | |
0eee494e | 411 | : -1); |
412 | else if (TREE_CODE (decl) == STRING_CST) | |
413 | decl_size = TREE_STRING_LENGTH (decl); | |
414 | else if (TYPE_SIZE_UNIT (TREE_TYPE (decl))) | |
415 | decl_size = int_size_in_bytes (TREE_TYPE (decl)); | |
416 | else | |
417 | decl_size = -1; | |
418 | ||
419 | return (decl_size <= 0 ? offset != 0 : offset + size > decl_size); | |
420 | } | |
421 | ||
422 | return 0; | |
67f79732 | 423 | |
635aff97 | 424 | case LABEL_REF: |
635aff97 | 425 | return 0; |
426 | ||
427 | case REG: | |
be8108ee | 428 | /* Stack references are assumed not to trap, but we need to deal with |
429 | nonsensical offsets. */ | |
430 | if (x == frame_pointer_rtx) | |
431 | { | |
432 | HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET; | |
433 | if (size == 0) | |
434 | size = GET_MODE_SIZE (mode); | |
435 | if (FRAME_GROWS_DOWNWARD) | |
436 | { | |
437 | if (adj_offset < frame_offset || adj_offset + size - 1 >= 0) | |
438 | return 1; | |
439 | } | |
440 | else | |
441 | { | |
442 | if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset) | |
443 | return 1; | |
444 | } | |
445 | return 0; | |
446 | } | |
447 | /* ??? Need to add a similar guard for nonsensical offsets. */ | |
448 | if (x == hard_frame_pointer_rtx | |
c0c2b734 | 449 | || x == stack_pointer_rtx |
450 | /* The arg pointer varies if it is not a fixed register. */ | |
451 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
452 | return 0; | |
453 | /* All of the virtual frame registers are stack references. */ | |
454 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
455 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
456 | return 0; | |
457 | return 1; | |
635aff97 | 458 | |
459 | case CONST: | |
0eee494e | 460 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
461 | mode, unaligned_mems); | |
635aff97 | 462 | |
463 | case PLUS: | |
1aecae7f | 464 | /* An address is assumed not to trap if: |
0eee494e | 465 | - it is the pic register plus a constant. */ |
466 | if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1))) | |
467 | return 0; | |
468 | ||
be8108ee | 469 | /* - or it is an address that can't trap plus a constant integer. */ |
971ba038 | 470 | if (CONST_INT_P (XEXP (x, 1)) |
0eee494e | 471 | && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)), |
472 | size, mode, unaligned_mems)) | |
1aecae7f | 473 | return 0; |
474 | ||
475 | return 1; | |
635aff97 | 476 | |
477 | case LO_SUM: | |
c0c2b734 | 478 | case PRE_MODIFY: |
0eee494e | 479 | return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size, |
480 | mode, unaligned_mems); | |
c0c2b734 | 481 | |
482 | case PRE_DEC: | |
483 | case PRE_INC: | |
484 | case POST_DEC: | |
485 | case POST_INC: | |
486 | case POST_MODIFY: | |
0eee494e | 487 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
488 | mode, unaligned_mems); | |
c0c2b734 | 489 | |
0dbd1c74 | 490 | default: |
491 | break; | |
635aff97 | 492 | } |
493 | ||
494 | /* If it isn't one of the cases above, it can cause a trap. */ |
495 | return 1; | |
496 | } | |
497 | ||
1aecae7f | 498 | /* Return nonzero if the use of X as an address in a MEM can cause a trap. */ |
499 | ||
500 | int | |
dd9b9fc5 | 501 | rtx_addr_can_trap_p (const_rtx x) |
1aecae7f | 502 | { |
0eee494e | 503 | return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false); |
1aecae7f | 504 | } |
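A minimal usage sketch, not part of this file: callers typically ask about a MEM and pass its address; X here is a hypothetical rtx under inspection.

```c
if (MEM_P (x) && rtx_addr_can_trap_p (XEXP (x, 0)))
  {
    /* The load or store may fault, so it must not be speculated.  */
  }
```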
505 | ||
805e22b2 | 506 | /* Return true if X is an address that is known to not be zero. */ |
507 | ||
508 | bool | |
dd9b9fc5 | 509 | nonzero_address_p (const_rtx x) |
805e22b2 | 510 | { |
dd9b9fc5 | 511 | const enum rtx_code code = GET_CODE (x); |
805e22b2 | 512 | |
513 | switch (code) | |
514 | { | |
515 | case SYMBOL_REF: | |
516 | return !SYMBOL_REF_WEAK (x); | |
517 | ||
518 | case LABEL_REF: | |
519 | return true; | |
520 | ||
805e22b2 | 521 | case REG: |
522 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
523 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx | |
524 | || x == stack_pointer_rtx | |
525 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
526 | return true; | |
527 | /* All of the virtual frame registers are stack references. */ | |
528 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
529 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
530 | return true; | |
531 | return false; | |
532 | ||
533 | case CONST: | |
534 | return nonzero_address_p (XEXP (x, 0)); | |
535 | ||
536 | case PLUS: | |
805e22b2 | 537 | /* Handle PIC references. */ |
f1c575eb | 538 | if (XEXP (x, 0) == pic_offset_table_rtx |
805e22b2 | 539 | && CONSTANT_P (XEXP (x, 1))) |
540 | return true; | |
541 | return false; | |
542 | ||
543 | case PRE_MODIFY: | |
544 | /* Similar to the above; allow positive offsets. Further, since | |
545 | auto-inc is only allowed in memories, the register must be a | |
546 | pointer. */ | |
971ba038 | 547 | if (CONST_INT_P (XEXP (x, 1)) |
805e22b2 | 548 | && INTVAL (XEXP (x, 1)) > 0) |
549 | return true; | |
550 | return nonzero_address_p (XEXP (x, 0)); | |
551 | ||
552 | case PRE_INC: | |
553 | /* Similarly. Further, the offset is always positive. */ | |
554 | return true; | |
555 | ||
556 | case PRE_DEC: | |
557 | case POST_DEC: | |
558 | case POST_INC: | |
559 | case POST_MODIFY: | |
560 | return nonzero_address_p (XEXP (x, 0)); | |
561 | ||
562 | case LO_SUM: | |
563 | return nonzero_address_p (XEXP (x, 1)); | |
564 | ||
565 | default: | |
566 | break; | |
567 | } | |
568 | ||
569 | /* If it isn't one of the cases above, it might be zero. */ |
570 | return false; | |
571 | } | |
572 | ||
2617fe26 | 573 | /* Return 1 if X refers to a memory location whose address |
635aff97 | 574 | cannot be compared reliably with constant addresses, |
2617fe26 | 575 | or if X refers to a BLKmode memory object. |
ea087693 | 576 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
577 | zero, we are slightly more conservative. */ | |
635aff97 | 578 | |
52d07779 | 579 | bool |
580 | rtx_addr_varies_p (const_rtx x, bool for_alias) | |
635aff97 | 581 | { |
19cb6b50 | 582 | enum rtx_code code; |
583 | int i; | |
584 | const char *fmt; | |
635aff97 | 585 | |
586 | if (x == 0) | |
587 | return 0; | |
588 | ||
589 | code = GET_CODE (x); | |
590 | if (code == MEM) | |
ea087693 | 591 | return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias); |
635aff97 | 592 | |
593 | fmt = GET_RTX_FORMAT (code); | |
594 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
595 | if (fmt[i] == 'e') | |
cbea2709 | 596 | { |
ea087693 | 597 | if (rtx_addr_varies_p (XEXP (x, i), for_alias)) |
cbea2709 | 598 | return 1; |
599 | } | |
600 | else if (fmt[i] == 'E') | |
601 | { | |
602 | int j; | |
603 | for (j = 0; j < XVECLEN (x, i); j++) | |
ea087693 | 604 | if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias)) |
cbea2709 | 605 | return 1; |
606 | } | |
635aff97 | 607 | return 0; |
608 | } | |
609 | \f | |
cf7fb72d | 610 | /* Return the CALL in X if there is one. */ |
611 | ||
612 | rtx | |
613 | get_call_rtx_from (rtx x) | |
614 | { | |
615 | if (INSN_P (x)) | |
616 | x = PATTERN (x); | |
617 | if (GET_CODE (x) == PARALLEL) | |
618 | x = XVECEXP (x, 0, 0); | |
619 | if (GET_CODE (x) == SET) | |
620 | x = SET_SRC (x); | |
621 | if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0))) | |
622 | return x; | |
623 | return NULL_RTX; | |
624 | } | |
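A minimal usage sketch, not part of this file: INSN is a hypothetical call_insn, and handle_direct_call is a hypothetical handler for direct calls.

```c
rtx call = get_call_rtx_from (insn);
if (call != NULL_RTX)
  {
    rtx addr = XEXP (XEXP (call, 0), 0);   /* address inside the MEM */
    if (GET_CODE (addr) == SYMBOL_REF)
      handle_direct_call (XSTR (addr, 0)); /* called symbol's name */
  }
```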
625 | \f | |
635aff97 | 626 | /* Return the value of the integer term in X, if one is apparent; |
627 | otherwise return 0. | |
628 | Only obvious integer terms are detected. | |
04641143 | 629 | This is used in cse.c with the `related_value' field. */ |
635aff97 | 630 | |
d3115c90 | 631 | HOST_WIDE_INT |
dd9b9fc5 | 632 | get_integer_term (const_rtx x) |
635aff97 | 633 | { |
634 | if (GET_CODE (x) == CONST) | |
635 | x = XEXP (x, 0); | |
636 | ||
637 | if (GET_CODE (x) == MINUS | |
971ba038 | 638 | && CONST_INT_P (XEXP (x, 1))) |
635aff97 | 639 | return - INTVAL (XEXP (x, 1)); |
640 | if (GET_CODE (x) == PLUS | |
971ba038 | 641 | && CONST_INT_P (XEXP (x, 1))) |
635aff97 | 642 | return INTVAL (XEXP (x, 1)); |
643 | return 0; | |
644 | } | |
645 | ||
646 | /* If X is a constant, return the value sans apparent integer term; | |
647 | otherwise return 0. | |
648 | Only obvious integer terms are detected. */ | |
649 | ||
650 | rtx | |
dd9b9fc5 | 651 | get_related_value (const_rtx x) |
635aff97 | 652 | { |
653 | if (GET_CODE (x) != CONST) | |
654 | return 0; | |
655 | x = XEXP (x, 0); | |
656 | if (GET_CODE (x) == PLUS | |
971ba038 | 657 | && CONST_INT_P (XEXP (x, 1))) |
635aff97 | 658 | return XEXP (x, 0); |
659 | else if (GET_CODE (x) == MINUS | |
971ba038 | 660 | && CONST_INT_P (XEXP (x, 1))) |
635aff97 | 661 | return XEXP (x, 0); |
662 | return 0; | |
663 | } | |
664 | \f | |
e0ab7256 | 665 | /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points |
666 | to somewhere in the same object or object_block as SYMBOL. */ | |
667 | ||
668 | bool | |
dd9b9fc5 | 669 | offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset) |
e0ab7256 | 670 | { |
671 | tree decl; | |
672 | ||
673 | if (GET_CODE (symbol) != SYMBOL_REF) | |
674 | return false; | |
675 | ||
676 | if (offset == 0) | |
677 | return true; | |
678 | ||
679 | if (offset > 0) | |
680 | { | |
681 | if (CONSTANT_POOL_ADDRESS_P (symbol) | |
682 | && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol))) | |
683 | return true; | |
684 | ||
685 | decl = SYMBOL_REF_DECL (symbol); | |
686 | if (decl && offset < int_size_in_bytes (TREE_TYPE (decl))) | |
687 | return true; | |
688 | } | |
689 | ||
690 | if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) | |
691 | && SYMBOL_REF_BLOCK (symbol) | |
692 | && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0 | |
693 | && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol) | |
694 | < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size)) | |
695 | return true; | |
696 | ||
697 | return false; | |
698 | } | |
699 | ||
700 | /* Split X into a base and a constant offset, storing them in *BASE_OUT | |
701 | and *OFFSET_OUT respectively. */ | |
702 | ||
703 | void | |
704 | split_const (rtx x, rtx *base_out, rtx *offset_out) | |
705 | { | |
706 | if (GET_CODE (x) == CONST) | |
707 | { | |
708 | x = XEXP (x, 0); | |
971ba038 | 709 | if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1))) |
e0ab7256 | 710 | { |
711 | *base_out = XEXP (x, 0); | |
712 | *offset_out = XEXP (x, 1); | |
713 | return; | |
714 | } | |
715 | } | |
716 | *base_out = x; | |
717 | *offset_out = const0_rtx; | |
718 | } | |
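A minimal usage sketch, not part of this file: ADDR is a hypothetical address rtx, for example (const (plus (symbol_ref "x") (const_int 12))).

```c
rtx base, offset;
split_const (addr, &base, &offset);
/* OFFSET is always a CONST_INT (const0_rtx when no term is found).  */
if (GET_CODE (base) == SYMBOL_REF && INTVAL (offset) != 0)
  {
    /* Symbolic address with a constant displacement of INTVAL (offset).  */
  }
```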
719 | \f | |
40988080 | 720 | /* Return the number of places FIND appears within X. If COUNT_DEST is |
721 | zero, we do not count occurrences inside the destination of a SET. */ | |
722 | ||
723 | int | |
dd9b9fc5 | 724 | count_occurrences (const_rtx x, const_rtx find, int count_dest) |
40988080 | 725 | { |
726 | int i, j; | |
727 | enum rtx_code code; | |
728 | const char *format_ptr; | |
729 | int count; | |
730 | ||
731 | if (x == find) | |
732 | return 1; | |
733 | ||
734 | code = GET_CODE (x); | |
735 | ||
736 | switch (code) | |
737 | { | |
738 | case REG: | |
0349edce | 739 | CASE_CONST_ANY: |
40988080 | 740 | case SYMBOL_REF: |
741 | case CODE_LABEL: | |
742 | case PC: | |
743 | case CC0: | |
744 | return 0; | |
745 | ||
ac6a6c76 | 746 | case EXPR_LIST: |
747 | count = count_occurrences (XEXP (x, 0), find, count_dest); | |
748 | if (XEXP (x, 1)) | |
749 | count += count_occurrences (XEXP (x, 1), find, count_dest); | |
750 | return count; | |
48e1416a | 751 | |
40988080 | 752 | case MEM: |
e16ceb8e | 753 | if (MEM_P (find) && rtx_equal_p (x, find)) |
40988080 | 754 | return 1; |
755 | break; | |
756 | ||
757 | case SET: | |
758 | if (SET_DEST (x) == find && ! count_dest) | |
759 | return count_occurrences (SET_SRC (x), find, count_dest); | |
760 | break; | |
761 | ||
762 | default: | |
763 | break; | |
764 | } | |
765 | ||
766 | format_ptr = GET_RTX_FORMAT (code); | |
767 | count = 0; | |
768 | ||
769 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
770 | { | |
771 | switch (*format_ptr++) | |
772 | { | |
773 | case 'e': | |
774 | count += count_occurrences (XEXP (x, i), find, count_dest); | |
775 | break; | |
776 | ||
777 | case 'E': | |
778 | for (j = 0; j < XVECLEN (x, i); j++) | |
779 | count += count_occurrences (XVECEXP (x, i, j), find, count_dest); | |
780 | break; | |
781 | } | |
782 | } | |
783 | return count; | |
784 | } | |
3072d30e | 785 | |
0a98b6d9 | 786 | \f |
787 | /* Return TRUE if OP is a register or subreg of a register that | |
788 | holds an unsigned quantity. Otherwise, return FALSE. */ | |
789 | ||
790 | bool | |
791 | unsigned_reg_p (rtx op) | |
792 | { | |
793 | if (REG_P (op) | |
794 | && REG_EXPR (op) | |
795 | && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op)))) | |
796 | return true; | |
797 | ||
798 | if (GET_CODE (op) == SUBREG | |
e8629f9e | 799 | && SUBREG_PROMOTED_SIGN (op)) |
0a98b6d9 | 800 | return true; |
801 | ||
802 | return false; | |
803 | } | |
804 | ||
40988080 | 805 | \f |
635aff97 | 806 | /* Nonzero if register REG appears somewhere within IN. |
807 | Also works if REG is not a register; in this case it checks | |
808 | for a subexpression of IN that is Lisp "equal" to REG. */ | |
809 | ||
810 | int | |
dd9b9fc5 | 811 | reg_mentioned_p (const_rtx reg, const_rtx in) |
635aff97 | 812 | { |
19cb6b50 | 813 | const char *fmt; |
814 | int i; | |
815 | enum rtx_code code; | |
635aff97 | 816 | |
817 | if (in == 0) | |
818 | return 0; | |
819 | ||
820 | if (reg == in) | |
821 | return 1; | |
822 | ||
823 | if (GET_CODE (in) == LABEL_REF) | |
b49f2e4b | 824 | return reg == LABEL_REF_LABEL (in); |
635aff97 | 825 | |
826 | code = GET_CODE (in); | |
827 | ||
828 | switch (code) | |
829 | { | |
830 | /* Compare registers by number. */ | |
831 | case REG: | |
8ad4c111 | 832 | return REG_P (reg) && REGNO (in) == REGNO (reg); |
635aff97 | 833 | |
834 | /* These codes have no constituent expressions | |
835 | and are unique. */ | |
836 | case SCRATCH: | |
837 | case CC0: | |
838 | case PC: | |
839 | return 0; | |
840 | ||
0349edce | 841 | CASE_CONST_ANY: |
635aff97 | 842 | /* These are kept unique for a given value. */ |
843 | return 0; | |
2617fe26 | 844 | |
0dbd1c74 | 845 | default: |
846 | break; | |
635aff97 | 847 | } |
848 | ||
849 | if (GET_CODE (reg) == code && rtx_equal_p (reg, in)) | |
850 | return 1; | |
851 | ||
852 | fmt = GET_RTX_FORMAT (code); | |
853 | ||
854 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
855 | { | |
856 | if (fmt[i] == 'E') | |
857 | { | |
19cb6b50 | 858 | int j; |
635aff97 | 859 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) |
860 | if (reg_mentioned_p (reg, XVECEXP (in, i, j))) | |
861 | return 1; | |
862 | } | |
863 | else if (fmt[i] == 'e' | |
864 | && reg_mentioned_p (reg, XEXP (in, i))) | |
865 | return 1; | |
866 | } | |
867 | return 0; | |
868 | } | |
869 | \f | |
870 | /* Return 1 if in between BEG and END, exclusive of BEG and END, there is | |
871 | no CODE_LABEL insn. */ | |
872 | ||
873 | int | |
91a55c11 | 874 | no_labels_between_p (const rtx_insn *beg, const rtx_insn *end) |
635aff97 | 875 | { |
91a55c11 | 876 | rtx_insn *p; |
62de2472 | 877 | if (beg == end) |
878 | return 0; | |
635aff97 | 879 | for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) |
6d7dc5b9 | 880 | if (LABEL_P (p)) |
635aff97 | 881 | return 0; |
882 | return 1; | |
883 | } | |
884 | ||
885 | /* Nonzero if register REG is used in an insn between | |
886 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
887 | ||
888 | int | |
91a55c11 | 889 | reg_used_between_p (const_rtx reg, const rtx_insn *from_insn, |
890 | const rtx_insn *to_insn) | |
635aff97 | 891 | { |
200c2a8f | 892 | rtx_insn *insn; |
635aff97 | 893 | |
894 | if (from_insn == to_insn) | |
895 | return 0; | |
896 | ||
897 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
9845d120 | 898 | if (NONDEBUG_INSN_P (insn) |
0c7201ca | 899 | && (reg_overlap_mentioned_p (reg, PATTERN (insn)) |
ecbd66eb | 900 | || (CALL_P (insn) && find_reg_fusage (insn, USE, reg)))) |
635aff97 | 901 | return 1; |
902 | return 0; | |
903 | } | |
904 | \f | |
905 | /* Nonzero if the old value of X, a register, is referenced in BODY. If X | |
906 | is entirely replaced by a new value and the only use is as a SET_DEST, | |
907 | we do not consider it a reference. */ | |
908 | ||
909 | int | |
dd9b9fc5 | 910 | reg_referenced_p (const_rtx x, const_rtx body) |
635aff97 | 911 | { |
912 | int i; | |
913 | ||
914 | switch (GET_CODE (body)) | |
915 | { | |
916 | case SET: | |
917 | if (reg_overlap_mentioned_p (x, SET_SRC (body))) | |
918 | return 1; | |
919 | ||
920 | /* If the destination is anything other than CC0, PC, a REG or a SUBREG | |
921 | of a REG that occupies all of the REG, the insn references X if | |
922 | it is mentioned in the destination. */ | |
923 | if (GET_CODE (SET_DEST (body)) != CC0 | |
924 | && GET_CODE (SET_DEST (body)) != PC | |
8ad4c111 | 925 | && !REG_P (SET_DEST (body)) |
635aff97 | 926 | && ! (GET_CODE (SET_DEST (body)) == SUBREG |
8ad4c111 | 927 | && REG_P (SUBREG_REG (SET_DEST (body))) |
635aff97 | 928 | && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body)))) |
929 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
930 | == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body))) | |
931 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
932 | && reg_overlap_mentioned_p (x, SET_DEST (body))) | |
933 | return 1; | |
0dbd1c74 | 934 | return 0; |
635aff97 | 935 | |
936 | case ASM_OPERANDS: | |
937 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
938 | if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i))) | |
939 | return 1; | |
0dbd1c74 | 940 | return 0; |
635aff97 | 941 | |
942 | case CALL: | |
943 | case USE: | |
155b05dc | 944 | case IF_THEN_ELSE: |
635aff97 | 945 | return reg_overlap_mentioned_p (x, body); |
946 | ||
947 | case TRAP_IF: | |
948 | return reg_overlap_mentioned_p (x, TRAP_CONDITION (body)); | |
949 | ||
9f449ed6 | 950 | case PREFETCH: |
951 | return reg_overlap_mentioned_p (x, XEXP (body, 0)); | |
952 | ||
3384a30e | 953 | case UNSPEC: |
954 | case UNSPEC_VOLATILE: | |
57d44d09 | 955 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
956 | if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i))) | |
957 | return 1; | |
958 | return 0; | |
959 | ||
635aff97 | 960 | case PARALLEL: |
961 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
962 | if (reg_referenced_p (x, XVECEXP (body, 0, i))) | |
963 | return 1; | |
0dbd1c74 | 964 | return 0; |
2617fe26 | 965 | |
cccfd0f9 | 966 | case CLOBBER: |
e16ceb8e | 967 | if (MEM_P (XEXP (body, 0))) |
cccfd0f9 | 968 | if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) |
969 | return 1; | |
970 | return 0; | |
971 | ||
406034fa | 972 | case COND_EXEC: |
973 | if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) | |
974 | return 1; | |
975 | return reg_referenced_p (x, COND_EXEC_CODE (body)); | |
976 | ||
0dbd1c74 | 977 | default: |
978 | return 0; | |
635aff97 | 979 | } |
635aff97 | 980 | } |
635aff97 | 981 | \f |
982 | /* Nonzero if register REG is set or clobbered in an insn between | |
983 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
984 | ||
985 | int | |
311f821c | 986 | reg_set_between_p (const_rtx reg, const rtx_insn *from_insn, |
987 | const rtx_insn *to_insn) | |
635aff97 | 988 | { |
200c2a8f | 989 | const rtx_insn *insn; |
635aff97 | 990 | |
991 | if (from_insn == to_insn) | |
992 | return 0; | |
993 | ||
994 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
9204e736 | 995 | if (INSN_P (insn) && reg_set_p (reg, insn)) |
635aff97 | 996 | return 1; |
997 | return 0; | |
998 | } | |
999 | ||
1000 | /* Internals of reg_set_between_p. */ | |
635aff97 | 1001 | int |
7ecb5bb2 | 1002 | reg_set_p (const_rtx reg, const_rtx insn) |
635aff97 | 1003 | { |
8624ddf6 | 1004 | /* After delay slot handling, call and branch insns might be in a |
1005 | sequence. Check all the elements there. */ | |
1006 | if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
1007 | { | |
1008 | for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i) | |
1009 | if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i))) | |
1010 | return true; | |
1011 | ||
1012 | return false; | |
1013 | } | |
1014 | ||
635aff97 | 1015 | /* We can be passed an insn or part of one. If we are passed an insn, |
1016 | check if a side-effect of the insn clobbers REG. */ | |
805e22b2 | 1017 | if (INSN_P (insn) |
1018 | && (FIND_REG_INC_NOTE (insn, reg) | |
6d7dc5b9 | 1019 | || (CALL_P (insn) |
8ad4c111 | 1020 | && ((REG_P (reg) |
3c71a5cc | 1021 | && REGNO (reg) < FIRST_PSEUDO_REGISTER |
20128b13 | 1022 | && overlaps_hard_reg_set_p (regs_invalidated_by_call, |
1023 | GET_MODE (reg), REGNO (reg))) | |
e16ceb8e | 1024 | || MEM_P (reg) |
805e22b2 | 1025 | || find_reg_fusage (insn, CLOBBER, reg))))) |
8624ddf6 | 1026 | return true; |
635aff97 | 1027 | |
b7995d54 | 1028 | return set_of (reg, insn) != NULL_RTX; |
635aff97 | 1029 | } |
1030 | ||
1031 | /* Similar to reg_set_between_p, but check all registers in X. Return 0 | |
1032 | only if none of them are modified between START and END. Return 1 if | |
f0b5f617 | 1033 | X contains a MEM; this routine does use memory aliasing. */ |
635aff97 | 1034 | |
1035 | int | |
32482209 | 1036 | modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end) |
635aff97 | 1037 | { |
5493cb9a | 1038 | const enum rtx_code code = GET_CODE (x); |
d2ca078f | 1039 | const char *fmt; |
2c18b47c | 1040 | int i, j; |
200c2a8f | 1041 | rtx_insn *insn; |
c7bf7428 | 1042 | |
1043 | if (start == end) | |
1044 | return 0; | |
635aff97 | 1045 | |
1046 | switch (code) | |
1047 | { | |
0349edce | 1048 | CASE_CONST_ANY: |
635aff97 | 1049 | case CONST: |
1050 | case SYMBOL_REF: | |
1051 | case LABEL_REF: | |
1052 | return 0; | |
1053 | ||
1054 | case PC: | |
1055 | case CC0: | |
1056 | return 1; | |
1057 | ||
1058 | case MEM: | |
c7bf7428 | 1059 | if (modified_between_p (XEXP (x, 0), start, end)) |
635aff97 | 1060 | return 1; |
bf0ee60a | 1061 | if (MEM_READONLY_P (x)) |
1062 | return 0; | |
c7bf7428 | 1063 | for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn)) |
1064 | if (memory_modified_in_insn_p (x, insn)) | |
1065 | return 1; | |
1066 | return 0; | |
635aff97 | 1067 | break; |
1068 | ||
1069 | case REG: | |
1070 | return reg_set_between_p (x, start, end); | |
2617fe26 | 1071 | |
0dbd1c74 | 1072 | default: |
1073 | break; | |
635aff97 | 1074 | } |
1075 | ||
1076 | fmt = GET_RTX_FORMAT (code); | |
1077 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2c18b47c | 1078 | { |
1079 | if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end)) | |
1080 | return 1; | |
1081 | ||
1bd8ca86 | 1082 | else if (fmt[i] == 'E') |
2c18b47c | 1083 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
1084 | if (modified_between_p (XVECEXP (x, i, j), start, end)) | |
1085 | return 1; | |
1086 | } | |
1087 | ||
1088 | return 0; | |
1089 | } | |
1090 | ||
1091 | /* Similar to reg_set_p, but check all registers in X. Return 0 only if none | |
1092 | of them are modified in INSN. Return 1 if X contains a MEM; this routine | |
c7bf7428 | 1093 | does use memory aliasing. */ |
2c18b47c | 1094 | |
1095 | int | |
5493cb9a | 1096 | modified_in_p (const_rtx x, const_rtx insn) |
2c18b47c | 1097 | { |
5493cb9a | 1098 | const enum rtx_code code = GET_CODE (x); |
d2ca078f | 1099 | const char *fmt; |
2c18b47c | 1100 | int i, j; |
1101 | ||
1102 | switch (code) | |
1103 | { | |
0349edce | 1104 | CASE_CONST_ANY: |
2c18b47c | 1105 | case CONST: |
1106 | case SYMBOL_REF: | |
1107 | case LABEL_REF: | |
1108 | return 0; | |
1109 | ||
1110 | case PC: | |
1111 | case CC0: | |
635aff97 | 1112 | return 1; |
1113 | ||
2c18b47c | 1114 | case MEM: |
c7bf7428 | 1115 | if (modified_in_p (XEXP (x, 0), insn)) |
2c18b47c | 1116 | return 1; |
bf0ee60a | 1117 | if (MEM_READONLY_P (x)) |
1118 | return 0; | |
c7bf7428 | 1119 | if (memory_modified_in_insn_p (x, insn)) |
1120 | return 1; | |
1121 | return 0; | |
2c18b47c | 1122 | break; |
1123 | ||
1124 | case REG: | |
1125 | return reg_set_p (x, insn); | |
0dbd1c74 | 1126 | |
1127 | default: | |
1128 | break; | |
2c18b47c | 1129 | } |
1130 | ||
1131 | fmt = GET_RTX_FORMAT (code); | |
1132 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1133 | { | |
1134 | if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn)) | |
1135 | return 1; | |
1136 | ||
1bd8ca86 | 1137 | else if (fmt[i] == 'E') |
2c18b47c | 1138 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
1139 | if (modified_in_p (XVECEXP (x, i, j), insn)) | |
1140 | return 1; | |
1141 | } | |
1142 | ||
635aff97 | 1143 | return 0; |
1144 | } | |
1145 | \f | |
b7995d54 | 1146 | /* Helper function for set_of. */ |
1147 | struct set_of_data | |
1148 | { | |
81a410b1 | 1149 | const_rtx found; |
1150 | const_rtx pat; | |
b7995d54 | 1151 | }; |
1152 | ||
1153 | static void | |
81a410b1 | 1154 | set_of_1 (rtx x, const_rtx pat, void *data1) |
b7995d54 | 1155 | { |
81a410b1 | 1156 | struct set_of_data *const data = (struct set_of_data *) (data1); |
1157 | if (rtx_equal_p (x, data->pat) | |
1158 | || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x))) | |
1159 | data->found = pat; | |
b7995d54 | 1160 | } |
1161 | ||
1162 | /* Given an INSN, return a SET or CLOBBER expression that does modify PAT |
4a82352a | 1163 | (either directly or via STRICT_LOW_PART and similar modifiers). */ |
81a410b1 | 1164 | const_rtx |
1165 | set_of (const_rtx pat, const_rtx insn) | |
b7995d54 | 1166 | { |
1167 | struct set_of_data data; | |
1168 | data.found = NULL_RTX; | |
1169 | data.pat = pat; | |
1170 | note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data); | |
1171 | return data.found; | |
1172 | } | |
effd1640 | 1173 | |
e4442dc5 | 1174 | /* Add all hard registers in X to *PSET. */ |
1175 | void | |
1176 | find_all_hard_regs (const_rtx x, HARD_REG_SET *pset) | |
1177 | { | |
1178 | subrtx_iterator::array_type array; | |
1179 | FOR_EACH_SUBRTX (iter, array, x, NONCONST) | |
1180 | { | |
1181 | const_rtx x = *iter; | |
1182 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
1183 | add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x)); | |
1184 | } | |
1185 | } | |
1186 | ||
effd1640 | 1187 | /* This function, called through note_stores, collects sets and |
1188 | clobbers of hard registers in a HARD_REG_SET, which is pointed to | |
1189 | by DATA. */ | |
1190 | void | |
1191 | record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) | |
1192 | { | |
1193 | HARD_REG_SET *pset = (HARD_REG_SET *)data; | |
1194 | if (REG_P (x) && HARD_REGISTER_P (x)) | |
1195 | add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x)); | |
1196 | } | |
1197 | ||
1198 | /* Examine INSN, and compute the set of hard registers written by it. | |
1199 | Store it in *PSET. Should only be called after reload. */ | |
1200 | void | |
b9452872 | 1201 | find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit) |
effd1640 | 1202 | { |
1203 | rtx link; | |
1204 | ||
1205 | CLEAR_HARD_REG_SET (*pset); | |
1206 | note_stores (PATTERN (insn), record_hard_reg_sets, pset); | |
1207 | if (CALL_P (insn)) | |
6792947d | 1208 | { |
1209 | if (implicit) | |
1210 | IOR_HARD_REG_SET (*pset, call_used_reg_set); | |
1211 | ||
1212 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
1213 | record_hard_reg_sets (XEXP (link, 0), NULL, pset); | |
1214 | } | |
effd1640 | 1215 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1216 | if (REG_NOTE_KIND (link) == REG_INC) | |
1217 | record_hard_reg_sets (XEXP (link, 0), NULL, pset); | |
1218 | } | |
1219 | ||
effd1640 | 1220 | /* Like record_hard_reg_sets, but called through note_uses. */ |
1221 | void | |
1222 | record_hard_reg_uses (rtx *px, void *data) | |
1223 | { | |
e4442dc5 | 1224 | find_all_hard_regs (*px, (HARD_REG_SET *) data); |
effd1640 | 1225 | } |
b7995d54 | 1226 | \f |
635aff97 | 1227 | /* Given an INSN, return a SET expression if this insn has only a single SET. |
1228 | It may also have CLOBBERs, USEs, or SETs whose output |
1229 | will not be used, which we ignore. */ | |
1230 | ||
1231 | rtx | |
50fc2d35 | 1232 | single_set_2 (const rtx_insn *insn, const_rtx pat) |
635aff97 | 1233 | { |
b6590daa | 1234 | rtx set = NULL; |
1235 | int set_verified = 1; | |
635aff97 | 1236 | int i; |
b6590daa | 1237 | |
6531eca9 | 1238 | if (GET_CODE (pat) == PARALLEL) |
635aff97 | 1239 | { |
b6590daa | 1240 | for (i = 0; i < XVECLEN (pat, 0); i++) |
6531eca9 | 1241 | { |
b6590daa | 1242 | rtx sub = XVECEXP (pat, 0, i); |
1243 | switch (GET_CODE (sub)) | |
1244 | { | |
1245 | case USE: | |
1246 | case CLOBBER: | |
1247 | break; | |
1248 | ||
1249 | case SET: | |
1250 | /* We can consider insns having multiple sets, where all | |
1251 | but one are dead as single set insns. In common case | |
1252 | only single set is present in the pattern so we want | |
dd5b4b36 | 1253 | to avoid checking for REG_UNUSED notes unless necessary. |
b6590daa | 1254 | |
1255 | When we reach set first time, we just expect this is | |
1256 | the single set we are looking for and only when more | |
1257 | sets are found in the insn, we check them. */ | |
1258 | if (!set_verified) | |
1259 | { | |
1260 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (set)) | |
1261 | && !side_effects_p (set)) | |
1262 | set = NULL; | |
1263 | else | |
1264 | set_verified = 1; | |
1265 | } | |
1266 | if (!set) | |
1267 | set = sub, set_verified = 0; | |
1268 | else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub)) | |
1269 | || side_effects_p (sub)) | |
1270 | return NULL_RTX; | |
1271 | break; | |
1272 | ||
1273 | default: | |
1274 | return NULL_RTX; | |
1275 | } | |
93127143 | 1276 | } |
635aff97 | 1277 | } |
b6590daa | 1278 | return set; |
635aff97 | 1279 | } |
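A minimal usage sketch, not part of this file: most callers reach this code through single_set, the rtl.h inline that handles a plain SET itself and falls back to single_set_2 for more complex patterns; note_single_register_result is a hypothetical consumer.

```c
rtx set = single_set (insn);
if (set != NULL_RTX && REG_P (SET_DEST (set)))
  note_single_register_result (SET_DEST (set), SET_SRC (set));
```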
e6cae665 | 1280 | |
1281 | /* Given an INSN, return nonzero if it has more than one SET, else return | |
1282 | zero. */ | |
1283 | ||
21b510d8 | 1284 | int |
dd9b9fc5 | 1285 | multiple_sets (const_rtx insn) |
e6cae665 | 1286 | { |
2c641110 | 1287 | int found; |
e6cae665 | 1288 | int i; |
2617fe26 | 1289 | |
e6cae665 | 1290 | /* INSN must be an insn. */ |
9204e736 | 1291 | if (! INSN_P (insn)) |
e6cae665 | 1292 | return 0; |
1293 | ||
1294 | /* Only a PARALLEL can have multiple SETs. */ | |
1295 | if (GET_CODE (PATTERN (insn)) == PARALLEL) | |
1296 | { | |
1297 | for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
1298 | if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET) | |
1299 | { | |
1300 | /* If we have already found a SET, then return now. */ | |
1301 | if (found) | |
1302 | return 1; | |
1303 | else | |
1304 | found = 1; | |
1305 | } | |
1306 | } | |
2617fe26 | 1307 | |
e6cae665 | 1308 | /* Either zero or one SET. */ |
1309 | return 0; | |
1310 | } | |
635aff97 | 1311 | \f |
c955554c | 1312 | /* Return nonzero if the destination of SET equals the source |
1313 | and there are no side effects. */ | |
1314 | ||
1315 | int | |
dd9b9fc5 | 1316 | set_noop_p (const_rtx set) |
c955554c | 1317 | { |
1318 | rtx src = SET_SRC (set); | |
1319 | rtx dst = SET_DEST (set); | |
1320 | ||
675b92cc | 1321 | if (dst == pc_rtx && src == pc_rtx) |
1322 | return 1; | |
1323 | ||
e16ceb8e | 1324 | if (MEM_P (dst) && MEM_P (src)) |
53fffe66 | 1325 | return rtx_equal_p (dst, src) && !side_effects_p (dst); |
1326 | ||
476d094d | 1327 | if (GET_CODE (dst) == ZERO_EXTRACT) |
c955554c | 1328 | return rtx_equal_p (XEXP (dst, 0), src) |
53fffe66 | 1329 | && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx |
1330 | && !side_effects_p (src); | |
c955554c | 1331 | |
1332 | if (GET_CODE (dst) == STRICT_LOW_PART) | |
1333 | dst = XEXP (dst, 0); | |
1334 | ||
1335 | if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) | |
1336 | { | |
1337 | if (SUBREG_BYTE (src) != SUBREG_BYTE (dst)) | |
1338 | return 0; | |
1339 | src = SUBREG_REG (src); | |
1340 | dst = SUBREG_REG (dst); | |
1341 | } | |
1342 | ||
b2bb3848 | 1343 | /* It is a NOOP if destination overlaps with selected src vector |
1344 | elements. */ | |
1345 | if (GET_CODE (src) == VEC_SELECT | |
1346 | && REG_P (XEXP (src, 0)) && REG_P (dst) | |
1347 | && HARD_REGISTER_P (XEXP (src, 0)) | |
1348 | && HARD_REGISTER_P (dst)) | |
1349 | { | |
1350 | int i; | |
1351 | rtx par = XEXP (src, 1); | |
1352 | rtx src0 = XEXP (src, 0); | |
1353 | int c0 = INTVAL (XVECEXP (par, 0, 0)); | |
1354 | HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0; | |
1355 | ||
1356 | for (i = 1; i < XVECLEN (par, 0); i++) | |
1357 | if (INTVAL (XVECEXP (par, 0, i)) != c0 + i) | |
1358 | return 0; | |
1359 | return | |
1360 | simplify_subreg_regno (REGNO (src0), GET_MODE (src0), | |
1361 | offset, GET_MODE (dst)) == (int) REGNO (dst); | |
1362 | } | |
1363 | ||
8ad4c111 | 1364 | return (REG_P (src) && REG_P (dst) |
c955554c | 1365 | && REGNO (src) == REGNO (dst)); |
1366 | } | |
b08cd584 | 1367 | \f |
1368 | /* Return nonzero if an insn consists only of SETs, each of which only sets a | |
1369 | value to itself. */ | |
1370 | ||
1371 | int | |
e79ab52b | 1372 | noop_move_p (const rtx_insn *insn) |
b08cd584 | 1373 | { |
1374 | rtx pat = PATTERN (insn); | |
1375 | ||
1805c35c | 1376 | if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE) |
1377 | return 1; | |
1378 | ||
b08cd584 | 1379 | /* Insns carrying these notes are useful later on. */ |
1380 | if (find_reg_note (insn, REG_EQUAL, NULL_RTX)) | |
1381 | return 0; | |
1382 | ||
3f0def8e | 1383 | /* Check the code to be executed for COND_EXEC. */ |
1384 | if (GET_CODE (pat) == COND_EXEC) | |
1385 | pat = COND_EXEC_CODE (pat); | |
1386 | ||
b08cd584 | 1387 | if (GET_CODE (pat) == SET && set_noop_p (pat)) |
1388 | return 1; | |
1389 | ||
1390 | if (GET_CODE (pat) == PARALLEL) | |
1391 | { | |
1392 | int i; | |
1393 | /* If nothing but SETs of registers to themselves, | |
1394 | this insn can also be deleted. */ | |
1395 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
1396 | { | |
1397 | rtx tem = XVECEXP (pat, 0, i); | |
1398 | ||
1399 | if (GET_CODE (tem) == USE | |
1400 | || GET_CODE (tem) == CLOBBER) | |
1401 | continue; | |
1402 | ||
1403 | if (GET_CODE (tem) != SET || ! set_noop_p (tem)) | |
1404 | return 0; | |
1405 | } | |
1406 | ||
1407 | return 1; | |
1408 | } | |
1409 | return 0; | |
1410 | } | |
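A minimal usage sketch, not part of this file: a cleanup pass might delete insns that only copy registers to themselves; delete_insn is the cfgrtl.c routine, and the iteration over insns is elided.

```c
if (INSN_P (insn) && noop_move_p (insn))
  delete_insn (insn);
```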
1411 | \f | |
c955554c | 1412 | |
635aff97 | 1413 | /* Return nonzero if register in range [REGNO, ENDREGNO) |
1414 | appears either explicitly or implicitly in X | |
1415 | other than being stored into. | |
1416 | ||
1417 | References contained within the substructure at LOC do not count. | |
1418 | LOC may be zero, meaning don't ignore anything. */ | |
1419 | ||
2ec77a7c | 1420 | bool |
dd9b9fc5 | 1421 | refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, |
3ad4992f | 1422 | rtx *loc) |
635aff97 | 1423 | { |
02e7a332 | 1424 | int i; |
1425 | unsigned int x_regno; | |
1426 | RTX_CODE code; | |
1427 | const char *fmt; | |
635aff97 | 1428 | |
1429 | repeat: | |
1430 | /* The contents of a REG_NONNEG note is always zero, so we must come here | |
1431 | upon repeat in case the last REG_NOTE is a REG_NONNEG note. */ | |
1432 | if (x == 0) | |
2ec77a7c | 1433 | return false; |
635aff97 | 1434 | |
1435 | code = GET_CODE (x); | |
1436 | ||
1437 | switch (code) | |
1438 | { | |
1439 | case REG: | |
02e7a332 | 1440 | x_regno = REGNO (x); |
2c18b47c | 1441 | |
1442 | /* If we are modifying the stack, frame, or argument pointer, it will |
1443 | clobber a virtual register. In fact, we could be more precise, | |
1444 | but it isn't worth it. */ | |
02e7a332 | 1445 | if ((x_regno == STACK_POINTER_REGNUM |
c6bb296a | 1446 | || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
1447 | && x_regno == ARG_POINTER_REGNUM) | |
02e7a332 | 1448 | || x_regno == FRAME_POINTER_REGNUM) |
2c18b47c | 1449 | && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER) |
2ec77a7c | 1450 | return true; |
2c18b47c | 1451 | |
a2c6f0b7 | 1452 | return endregno > x_regno && regno < END_REGNO (x); |
635aff97 | 1453 | |
1454 | case SUBREG: | |
1455 | /* If this is a SUBREG of a hard reg, we can see exactly which | |
1456 | registers are being modified. Otherwise, handle normally. */ | |
8ad4c111 | 1457 | if (REG_P (SUBREG_REG (x)) |
635aff97 | 1458 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) |
1459 | { | |
701e46d0 | 1460 | unsigned int inner_regno = subreg_regno (x); |
02e7a332 | 1461 | unsigned int inner_endregno |
aee171c8 | 1462 | = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER |
fe2ebfc8 | 1463 | ? subreg_nregs (x) : 1); |
635aff97 | 1464 | |
1465 | return endregno > inner_regno && regno < inner_endregno; | |
1466 | } | |
1467 | break; | |
1468 | ||
1469 | case CLOBBER: | |
1470 | case SET: | |
1471 | if (&SET_DEST (x) != loc | |
1472 | /* Note setting a SUBREG counts as referring to the REG it is in for | |
1473 | a pseudo but not for hard registers since we can | |
1474 | treat each word individually. */ | |
1475 | && ((GET_CODE (SET_DEST (x)) == SUBREG | |
1476 | && loc != &SUBREG_REG (SET_DEST (x)) | |
8ad4c111 | 1477 | && REG_P (SUBREG_REG (SET_DEST (x))) |
635aff97 | 1478 | && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER |
1479 | && refers_to_regno_p (regno, endregno, | |
1480 | SUBREG_REG (SET_DEST (x)), loc)) | |
8ad4c111 | 1481 | || (!REG_P (SET_DEST (x)) |
635aff97 | 1482 | && refers_to_regno_p (regno, endregno, SET_DEST (x), loc)))) |
2ec77a7c | 1483 | return true; |
635aff97 | 1484 | |
1485 | if (code == CLOBBER || loc == &SET_SRC (x)) | |
2ec77a7c | 1486 | return false; |
635aff97 | 1487 | x = SET_SRC (x); |
1488 | goto repeat; | |
0dbd1c74 | 1489 | |
1490 | default: | |
1491 | break; | |
635aff97 | 1492 | } |
1493 | ||
1494 | /* X does not match, so try its subexpressions. */ | |
1495 | ||
1496 | fmt = GET_RTX_FORMAT (code); | |
1497 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1498 | { | |
1499 | if (fmt[i] == 'e' && loc != &XEXP (x, i)) | |
1500 | { | |
1501 | if (i == 0) | |
1502 | { | |
1503 | x = XEXP (x, 0); | |
1504 | goto repeat; | |
1505 | } | |
1506 | else | |
1507 | if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc)) | |
2ec77a7c | 1508 | return true; |
635aff97 | 1509 | } |
1510 | else if (fmt[i] == 'E') | |
1511 | { | |
19cb6b50 | 1512 | int j; |
ea0041f4 | 1513 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
635aff97 | 1514 | if (loc != &XVECEXP (x, i, j) |
1515 | && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc)) | |
2ec77a7c | 1516 | return true; |
635aff97 | 1517 | } |
1518 | } | |
2ec77a7c | 1519 | return false; |
635aff97 | 1520 | } |
1521 | ||
1522 | /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, | |
1523 | we check if any register number in X conflicts with the relevant register | |
1524 | numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN | |
1525 | contains a MEM (we don't bother checking for memory addresses that can't | |
1526 | conflict because we expect this to be a rare case). */ |
1527 | ||
1528 | int | |
dd9b9fc5 | 1529 | reg_overlap_mentioned_p (const_rtx x, const_rtx in) |
635aff97 | 1530 | { |
02e7a332 | 1531 | unsigned int regno, endregno; |
635aff97 | 1532 | |
2f3f2ddf | 1533 | /* If either argument is a constant, then modifying X can not |
1534 | affect IN. Here we look at IN; we can profitably combine |
1535 | CONSTANT_P (x) with the switch statement below. */ | |
1536 | if (CONSTANT_P (in)) | |
8eb7d6fc | 1537 | return 0; |
406034fa | 1538 | |
2f3f2ddf | 1539 | recurse: |
406034fa | 1540 | switch (GET_CODE (x)) |
635aff97 | 1541 | { |
2f3f2ddf | 1542 | case STRICT_LOW_PART: |
1543 | case ZERO_EXTRACT: | |
1544 | case SIGN_EXTRACT: | |
1545 | /* Overly conservative. */ | |
1546 | x = XEXP (x, 0); | |
1547 | goto recurse; | |
1548 | ||
406034fa | 1549 | case SUBREG: |
635aff97 | 1550 | regno = REGNO (SUBREG_REG (x)); |
1551 | if (regno < FIRST_PSEUDO_REGISTER) | |
701e46d0 | 1552 | regno = subreg_regno (x); |
fe2ebfc8 | 1553 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
1554 | ? subreg_nregs (x) : 1); | |
406034fa | 1555 | goto do_reg; |
635aff97 | 1556 | |
406034fa | 1557 | case REG: |
1558 | regno = REGNO (x); | |
a2c6f0b7 | 1559 | endregno = END_REGNO (x); |
fe2ebfc8 | 1560 | do_reg: |
337d789b | 1561 | return refers_to_regno_p (regno, endregno, in, (rtx*) 0); |
635aff97 | 1562 | |
406034fa | 1563 | case MEM: |
1564 | { | |
1565 | const char *fmt; | |
1566 | int i; | |
635aff97 | 1567 | |
e16ceb8e | 1568 | if (MEM_P (in)) |
635aff97 | 1569 | return 1; |
1570 | ||
406034fa | 1571 | fmt = GET_RTX_FORMAT (GET_CODE (in)); |
1572 | for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) | |
bc99e194 | 1573 | if (fmt[i] == 'e') |
1574 | { | |
1575 | if (reg_overlap_mentioned_p (x, XEXP (in, i))) | |
1576 | return 1; | |
1577 | } | |
1578 | else if (fmt[i] == 'E') | |
1579 | { | |
1580 | int j; | |
1581 | for (j = XVECLEN (in, i) - 1; j >= 0; --j) | |
1582 | if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j))) | |
1583 | return 1; | |
1584 | } | |
3a1b2351 | 1585 | |
406034fa | 1586 | return 0; |
1587 | } | |
1588 | ||
1589 | case SCRATCH: | |
1590 | case PC: | |
1591 | case CC0: | |
1592 | return reg_mentioned_p (x, in); | |
1593 | ||
1594 | case PARALLEL: | |
e291e4ee | 1595 | { |
216b2683 | 1596 | int i; |
e291e4ee | 1597 | |
1598 | /* If any register in here refers to it, we return true. */ | |
4b303227 | 1599 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
1600 | if (XEXP (XVECEXP (x, 0, i), 0) != 0 | |
1601 | && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) | |
2f3f2ddf | 1602 | return 1; |
4b303227 | 1603 | return 0; |
e291e4ee | 1604 | } |
635aff97 | 1605 | |
406034fa | 1606 | default: |
04e579b6 | 1607 | gcc_assert (CONSTANT_P (x)); |
2f3f2ddf | 1608 | return 0; |
1609 | } | |
635aff97 | 1610 | } |
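/* Usage sketch (illustrative only, not part of rtlanal.c): a pass that wants
   to know whether the value written by a SET is read anywhere in another
   insn's pattern can ask reg_overlap_mentioned_p.  The helper name
   insn_reads_set_dest_p is hypothetical.  */

static bool
insn_reads_set_dest_p (rtx_insn *insn, rtx set)
{
  /* Nonzero when PATTERN (INSN) mentions any register (or MEM) that would
     be affected by modifying SET_DEST (SET).  */
  return reg_overlap_mentioned_p (SET_DEST (set), PATTERN (insn)) != 0;
}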
1611 | \f | |
635aff97 | 1612 | /* Call FUN on each register or MEM that is stored into or clobbered by X. |
02a63053 | 1613 | (X would be the pattern of an insn). DATA is an arbitrary pointer, |
1614 | ignored by note_stores, but passed to FUN. | |
1615 | ||
1616 | FUN receives three arguments: | |
1617 | 1. the REG, MEM, CC0 or PC being stored in or clobbered, | |
1618 | 2. the SET or CLOBBER rtx that does the store, | |
1619 | 3. the pointer DATA provided to note_stores. | |
635aff97 | 1620 | |
1621 | If the item being stored in or clobbered is a SUBREG of a hard register, | |
1622 | the SUBREG will be passed. */ | |
2617fe26 | 1623 | |
635aff97 | 1624 | void |
81a410b1 | 1625 | note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) |
635aff97 | 1626 | { |
a80f1c6c | 1627 | int i; |
216b2683 | 1628 | |
a80f1c6c | 1629 | if (GET_CODE (x) == COND_EXEC) |
1630 | x = COND_EXEC_CODE (x); | |
216b2683 | 1631 | |
a80f1c6c | 1632 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
1633 | { | |
1634 | rtx dest = SET_DEST (x); | |
1635 | ||
1636 | while ((GET_CODE (dest) == SUBREG | |
1637 | && (!REG_P (SUBREG_REG (dest)) | |
1638 | || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) | |
1639 | || GET_CODE (dest) == ZERO_EXTRACT | |
1640 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1641 | dest = XEXP (dest, 0); | |
1642 | ||
1643 | /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, | |
1644 | each of whose first operands is a register. */ | |
1645 | if (GET_CODE (dest) == PARALLEL) | |
1646 | { | |
1647 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1648 | if (XEXP (XVECEXP (dest, 0, i), 0) != 0) | |
1649 | (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); | |
1650 | } | |
1651 | else | |
1652 | (*fun) (dest, x, data); | |
1653 | } | |
02e7a332 | 1654 | |
a80f1c6c | 1655 | else if (GET_CODE (x) == PARALLEL) |
1656 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
1657 | note_stores (XVECEXP (x, 0, i), fun, data); | |
1658 | } | |
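/* Usage sketch (illustrative only, not part of rtlanal.c): counting how many
   times a given hard register is written by one insn pattern.  The callback
   matches the FUN signature documented above; the names count_stores_to_reg*
   are hypothetical.  */

struct count_stores_info { unsigned int regno; int count; };

static void
count_stores_to_reg_1 (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  struct count_stores_info *info = (struct count_stores_info *) data;
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (dest) && REGNO (dest) == info->regno)
    info->count++;
}

static int
count_stores_to_reg (rtx pat, unsigned int regno)
{
  struct count_stores_info info = { regno, 0 };
  note_stores (pat, count_stores_to_reg_1, &info);
  return info.count;
}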
635aff97 | 1659 | \f |
99b86c05 | 1660 | /* Like note_stores, but call FUN for each expression that is being | |
1661 | referenced in PBODY, a pointer to the PATTERN of an insn. We only call | |
1662 | FUN for each expression, not any interior subexpressions. FUN receives a | |
1663 | pointer to the expression and the DATA passed to this function. | |
1664 | ||
1665 | Note that this is not quite the same test as that done in reg_referenced_p | |
1666 | since that considers something as being referenced if it is being | |
1667 | partially set, while we do not. */ | |
1668 | ||
1669 | void | |
3ad4992f | 1670 | note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
99b86c05 | 1671 | { |
1672 | rtx body = *pbody; | |
1673 | int i; | |
1674 | ||
1675 | switch (GET_CODE (body)) | |
1676 | { | |
1677 | case COND_EXEC: | |
1678 | (*fun) (&COND_EXEC_TEST (body), data); | |
1679 | note_uses (&COND_EXEC_CODE (body), fun, data); | |
1680 | return; | |
1681 | ||
1682 | case PARALLEL: | |
1683 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1684 | note_uses (&XVECEXP (body, 0, i), fun, data); | |
1685 | return; | |
1686 | ||
48df5a7f | 1687 | case SEQUENCE: |
1688 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1689 | note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); | |
1690 | return; | |
1691 | ||
99b86c05 | 1692 | case USE: |
1693 | (*fun) (&XEXP (body, 0), data); | |
1694 | return; | |
1695 | ||
1696 | case ASM_OPERANDS: | |
1697 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
1698 | (*fun) (&ASM_OPERANDS_INPUT (body, i), data); | |
1699 | return; | |
1700 | ||
1701 | case TRAP_IF: | |
1702 | (*fun) (&TRAP_CONDITION (body), data); | |
1703 | return; | |
1704 | ||
9f449ed6 | 1705 | case PREFETCH: |
1706 | (*fun) (&XEXP (body, 0), data); | |
1707 | return; | |
1708 | ||
99b86c05 | 1709 | case UNSPEC: |
1710 | case UNSPEC_VOLATILE: | |
1711 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1712 | (*fun) (&XVECEXP (body, 0, i), data); | |
1713 | return; | |
1714 | ||
1715 | case CLOBBER: | |
e16ceb8e | 1716 | if (MEM_P (XEXP (body, 0))) |
99b86c05 | 1717 | (*fun) (&XEXP (XEXP (body, 0), 0), data); |
1718 | return; | |
1719 | ||
1720 | case SET: | |
1721 | { | |
1722 | rtx dest = SET_DEST (body); | |
1723 | ||
1724 | /* For sets we call FUN on the source, on the operands of a ZERO_EXTRACT | |
1725 | destination, and on the address of a MEM destination. */ | |
1726 | (*fun) (&SET_SRC (body), data); | |
1727 | ||
1728 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
1729 | { | |
1730 | (*fun) (&XEXP (dest, 1), data); | |
1731 | (*fun) (&XEXP (dest, 2), data); | |
1732 | } | |
1733 | ||
1734 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) | |
1735 | dest = XEXP (dest, 0); | |
1736 | ||
e16ceb8e | 1737 | if (MEM_P (dest)) |
99b86c05 | 1738 | (*fun) (&XEXP (dest, 0), data); |
1739 | } | |
1740 | return; | |
1741 | ||
1742 | default: | |
1743 | /* All the other possibilities never store. */ | |
1744 | (*fun) (pbody, data); | |
1745 | return; | |
1746 | } | |
1747 | } | |
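/* Usage sketch (illustrative only, not part of rtlanal.c): note_uses passes a
   pointer to each used expression, so a callback may rewrite uses in place.
   Here a hypothetical helper replaces every use of FROM with TO in the
   pattern pointed to by PLOC, using replace_rtx from later in this file.  */

struct replace_use_info { rtx from, to; };

static void
replace_uses_1 (rtx *loc, void *data)
{
  struct replace_use_info *info = (struct replace_use_info *) data;
  *loc = replace_rtx (*loc, info->from, info->to);
}

static void
replace_uses (rtx *ploc, rtx from, rtx to)
{
  struct replace_use_info info = { from, to };
  note_uses (ploc, replace_uses_1, &info);
}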
1748 | \f | |
635aff97 | 1749 | /* Return nonzero if X's old contents don't survive after INSN. |
1750 | This will be true if X is (cc0) or if X is a register and | |
1751 | X dies in INSN or INSN entirely sets X. | |
1752 | ||
476d094d | 1753 | "Entirely set" means set directly and not through a SUBREG, or |
1754 | ZERO_EXTRACT, so no trace of the old contents remains. | |
635aff97 | 1755 | Likewise, REG_INC does not count. |
1756 | ||
1757 | REG may be a hard or pseudo reg. Renumbering is not taken into account, | |
1758 | but for this use that makes no difference, since regs don't overlap | |
1759 | during their lifetimes. Therefore, this function may be used | |
3072d30e | 1760 | at any time after deaths have been computed. |
635aff97 | 1761 | |
1762 | If REG is a hard reg that occupies multiple machine registers, this | |
1763 | function will only return 1 if each of those registers will be replaced | |
1764 | by INSN. */ | |
1765 | ||
1766 | int | |
dd9b9fc5 | 1767 | dead_or_set_p (const_rtx insn, const_rtx x) |
635aff97 | 1768 | { |
a2c6f0b7 | 1769 | unsigned int regno, end_regno; |
02e7a332 | 1770 | unsigned int i; |
635aff97 | 1771 | |
1772 | /* Can't use cc0_rtx below since this file is used by genattrtab.c. */ | |
1773 | if (GET_CODE (x) == CC0) | |
1774 | return 1; | |
1775 | ||
04e579b6 | 1776 | gcc_assert (REG_P (x)); |
635aff97 | 1777 | |
1778 | regno = REGNO (x); | |
a2c6f0b7 | 1779 | end_regno = END_REGNO (x); |
1780 | for (i = regno; i < end_regno; i++) | |
635aff97 | 1781 | if (! dead_or_set_regno_p (insn, i)) |
1782 | return 0; | |
1783 | ||
1784 | return 1; | |
1785 | } | |
1786 | ||
30c74f6d | 1787 | /* Return TRUE iff DEST is a register, or a SUBREG of a register that | |
1788 | does not change the number of words of the inner register, and some | |
1789 | part of that register is TEST_REGNO. */ | |
1790 | ||
1791 | static bool | |
dd9b9fc5 | 1792 | covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
30c74f6d | 1793 | { |
1794 | unsigned int regno, endregno; | |
1795 | ||
1796 | if (GET_CODE (dest) == SUBREG | |
1797 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
1798 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
1799 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
1800 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) | |
1801 | dest = SUBREG_REG (dest); | |
1802 | ||
1803 | if (!REG_P (dest)) | |
1804 | return false; | |
1805 | ||
1806 | regno = REGNO (dest); | |
a2c6f0b7 | 1807 | endregno = END_REGNO (dest); |
30c74f6d | 1808 | return (test_regno >= regno && test_regno < endregno); |
1809 | } | |
1810 | ||
1811 | /* Like covers_regno_no_parallel_p, but also handles PARALLELs where | |
1812 | any member matches the covers_regno_no_parallel_p criteria. */ | |
1813 | ||
1814 | static bool | |
dd9b9fc5 | 1815 | covers_regno_p (const_rtx dest, unsigned int test_regno) |
30c74f6d | 1816 | { |
1817 | if (GET_CODE (dest) == PARALLEL) | |
1818 | { | |
1819 | /* Some targets place small structures in registers for return | |
1820 | values of functions, and those registers are wrapped in | |
1821 | PARALLELs that we may see as the destination of a SET. */ | |
1822 | int i; | |
1823 | ||
1824 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1825 | { | |
1826 | rtx inner = XEXP (XVECEXP (dest, 0, i), 0); | |
1827 | if (inner != NULL_RTX | |
1828 | && covers_regno_no_parallel_p (inner, test_regno)) | |
1829 | return true; | |
1830 | } | |
1831 | ||
1832 | return false; | |
1833 | } | |
1834 | else | |
1835 | return covers_regno_no_parallel_p (dest, test_regno); | |
1836 | } | |
1837 | ||
3072d30e | 1838 | /* Utility function for dead_or_set_p to check an individual register. */ |
635aff97 | 1839 | |
1840 | int | |
dd9b9fc5 | 1841 | dead_or_set_regno_p (const_rtx insn, unsigned int test_regno) |
635aff97 | 1842 | { |
dd9b9fc5 | 1843 | const_rtx pattern; |
635aff97 | 1844 | |
eb1c92fb | 1845 | /* See if there is a death note for something that includes TEST_REGNO. */ |
1846 | if (find_regno_note (insn, REG_DEAD, test_regno)) | |
1847 | return 1; | |
635aff97 | 1848 | |
6d7dc5b9 | 1849 | if (CALL_P (insn) |
0c7201ca | 1850 | && find_regno_fusage (insn, CLOBBER, test_regno)) |
1851 | return 1; | |
1852 | ||
406034fa | 1853 | pattern = PATTERN (insn); |
1854 | ||
d6e8850f | 1855 | /* If a COND_EXEC is not executed, the value survives. */ |
406034fa | 1856 | if (GET_CODE (pattern) == COND_EXEC) |
d6e8850f | 1857 | return 0; |
406034fa | 1858 | |
1859 | if (GET_CODE (pattern) == SET) | |
30c74f6d | 1860 | return covers_regno_p (SET_DEST (pattern), test_regno); |
406034fa | 1861 | else if (GET_CODE (pattern) == PARALLEL) |
635aff97 | 1862 | { |
19cb6b50 | 1863 | int i; |
635aff97 | 1864 | |
406034fa | 1865 | for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
635aff97 | 1866 | { |
406034fa | 1867 | rtx body = XVECEXP (pattern, 0, i); |
1868 | ||
1869 | if (GET_CODE (body) == COND_EXEC) | |
1870 | body = COND_EXEC_CODE (body); | |
635aff97 | 1871 | |
30c74f6d | 1872 | if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
1873 | && covers_regno_p (SET_DEST (body), test_regno)) | |
1874 | return 1; | |
635aff97 | 1875 | } |
1876 | } | |
1877 | ||
1878 | return 0; | |
1879 | } | |
1880 | ||
1881 | /* Return the reg-note of kind KIND in insn INSN, if there is one. | |
1882 | If DATUM is nonzero, look for one whose datum is DATUM. */ | |
1883 | ||
1884 | rtx | |
dd9b9fc5 | 1885 | find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
635aff97 | 1886 | { |
19cb6b50 | 1887 | rtx link; |
635aff97 | 1888 | |
0ea2d350 | 1889 | gcc_checking_assert (insn); |
62d6a022 | 1890 | |
49a945b8 | 1891 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
9204e736 | 1892 | if (! INSN_P (insn)) |
49a945b8 | 1893 | return 0; |
b2a24b6f | 1894 | if (datum == 0) |
1895 | { | |
1896 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1897 | if (REG_NOTE_KIND (link) == kind) | |
1898 | return link; | |
1899 | return 0; | |
1900 | } | |
49a945b8 | 1901 | |
635aff97 | 1902 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
b2a24b6f | 1903 | if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
635aff97 | 1904 | return link; |
1905 | return 0; | |
1906 | } | |
1907 | ||
1908 | /* Return the reg-note of kind KIND in insn INSN which applies to register | |
da5c9e5f | 1909 | number REGNO, if any. Return 0 if there is no such reg-note. Note that |
1910 | the REGNO of this NOTE need not be REGNO if REGNO is a hard register; | |
1911 | it might be the case that the note overlaps REGNO. */ | |
635aff97 | 1912 | |
1913 | rtx | |
dd9b9fc5 | 1914 | find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
635aff97 | 1915 | { |
19cb6b50 | 1916 | rtx link; |
635aff97 | 1917 | |
49a945b8 | 1918 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
9204e736 | 1919 | if (! INSN_P (insn)) |
49a945b8 | 1920 | return 0; |
1921 | ||
635aff97 | 1922 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1923 | if (REG_NOTE_KIND (link) == kind | |
1924 | /* Verify that it is a register, so that scratch and MEM won't cause a | |
1925 | problem here. */ | |
8ad4c111 | 1926 | && REG_P (XEXP (link, 0)) |
da5c9e5f | 1927 | && REGNO (XEXP (link, 0)) <= regno |
a2c6f0b7 | 1928 | && END_REGNO (XEXP (link, 0)) > regno) |
635aff97 | 1929 | return link; |
1930 | return 0; | |
1931 | } | |
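/* Usage sketch (illustrative only, not part of rtlanal.c): a REG_DEAD note
   for REGNO means the register's value is not needed after INSN, which is the
   usual way passes query liveness at the note level.  The helper name
   reg_dies_in_insn_p is hypothetical.  */

static bool
reg_dies_in_insn_p (rtx_insn *insn, unsigned int regno)
{
  return find_regno_note (insn, REG_DEAD, regno) != NULL_RTX;
}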
0c7201ca | 1932 | |
53cb61a7 | 1933 | /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
1934 | has such a note. */ | |
1935 | ||
1936 | rtx | |
dd9b9fc5 | 1937 | find_reg_equal_equiv_note (const_rtx insn) |
53cb61a7 | 1938 | { |
53fffe66 | 1939 | rtx link; |
53cb61a7 | 1940 | |
53fffe66 | 1941 | if (!INSN_P (insn)) |
53cb61a7 | 1942 | return 0; |
e4f51d19 | 1943 | |
53fffe66 | 1944 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1945 | if (REG_NOTE_KIND (link) == REG_EQUAL | |
1946 | || REG_NOTE_KIND (link) == REG_EQUIV) | |
1947 | { | |
e4f51d19 | 1948 | /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
1949 | insns that have multiple sets. Checking single_set to | |
1950 | make sure of this is not the proper check, as explained | |
1951 | in the comment in set_unique_reg_note. | |
1952 | ||
1953 | This should be changed into an assert. */ | |
1954 | if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) | |
53fffe66 | 1955 | return 0; |
1956 | return link; | |
1957 | } | |
1958 | return NULL; | |
53cb61a7 | 1959 | } |
1960 | ||
3aba99c8 | 1961 | /* Check whether INSN is a single_set whose source is known to be |
1962 | equivalent to a constant. Return that constant if so, otherwise | |
1963 | return null. */ | |
1964 | ||
1965 | rtx | |
93ee8dfb | 1966 | find_constant_src (const rtx_insn *insn) |
3aba99c8 | 1967 | { |
1968 | rtx note, set, x; | |
1969 | ||
1970 | set = single_set (insn); | |
1971 | if (set) | |
1972 | { | |
1973 | x = avoid_constant_pool_reference (SET_SRC (set)); | |
1974 | if (CONSTANT_P (x)) | |
1975 | return x; | |
1976 | } | |
1977 | ||
1978 | note = find_reg_equal_equiv_note (insn); | |
1979 | if (note && CONSTANT_P (XEXP (note, 0))) | |
1980 | return XEXP (note, 0); | |
1981 | ||
1982 | return NULL_RTX; | |
1983 | } | |
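/* Usage sketch (illustrative only, not part of rtlanal.c): deciding whether
   INSN simply loads a known constant into a register, e.g. as a cheap test
   before rematerializing the value instead of spilling it.  The helper name
   loads_known_constant_p is hypothetical.  */

static bool
loads_known_constant_p (const rtx_insn *insn)
{
  rtx set = single_set (insn);
  return (set != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && find_constant_src (insn) != NULL_RTX);
}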
1984 | ||
0c7201ca | 1985 | /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
1986 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1987 | ||
1988 | int | |
dd9b9fc5 | 1989 | find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
0c7201ca | 1990 | { |
1991 | /* If it's not a CALL_INSN, it can't possibly have a | |
1992 | CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ | |
6d7dc5b9 | 1993 | if (!CALL_P (insn)) |
0c7201ca | 1994 | return 0; |
1995 | ||
04e579b6 | 1996 | gcc_assert (datum); |
0c7201ca | 1997 | |
8ad4c111 | 1998 | if (!REG_P (datum)) |
0c7201ca | 1999 | { |
19cb6b50 | 2000 | rtx link; |
0c7201ca | 2001 | |
2002 | for (link = CALL_INSN_FUNCTION_USAGE (insn); | |
2617fe26 | 2003 | link; |
0c7201ca | 2004 | link = XEXP (link, 1)) |
2617fe26 | 2005 | if (GET_CODE (XEXP (link, 0)) == code |
ff90a874 | 2006 | && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
2617fe26 | 2007 | return 1; |
0c7201ca | 2008 | } |
2009 | else | |
2010 | { | |
02e7a332 | 2011 | unsigned int regno = REGNO (datum); |
0c7201ca | 2012 | |
2013 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
2014 | to pseudo registers, so don't bother checking. */ | |
2015 | ||
2016 | if (regno < FIRST_PSEUDO_REGISTER) | |
2617fe26 | 2017 | { |
788bed51 | 2018 | unsigned int end_regno = END_REGNO (datum); |
02e7a332 | 2019 | unsigned int i; |
0c7201ca | 2020 | |
2021 | for (i = regno; i < end_regno; i++) | |
2022 | if (find_regno_fusage (insn, code, i)) | |
2023 | return 1; | |
2617fe26 | 2024 | } |
0c7201ca | 2025 | } |
2026 | ||
2027 | return 0; | |
2028 | } | |
2029 | ||
2030 | /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found | |
2031 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
2032 | ||
2033 | int | |
dd9b9fc5 | 2034 | find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
0c7201ca | 2035 | { |
19cb6b50 | 2036 | rtx link; |
0c7201ca | 2037 | |
2038 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
2039 | to pseudo registers, so don't bother checking. */ | |
2040 | ||
2041 | if (regno >= FIRST_PSEUDO_REGISTER | |
6d7dc5b9 | 2042 | || !CALL_P (insn) ) |
0c7201ca | 2043 | return 0; |
2044 | ||
2045 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
005d995b | 2046 | { |
02e7a332 | 2047 | rtx op, reg; |
005d995b | 2048 | |
2049 | if (GET_CODE (op = XEXP (link, 0)) == code | |
8ad4c111 | 2050 | && REG_P (reg = XEXP (op, 0)) |
a2c6f0b7 | 2051 | && REGNO (reg) <= regno |
788bed51 | 2052 | && END_REGNO (reg) > regno) |
005d995b | 2053 | return 1; |
2054 | } | |
0c7201ca | 2055 | |
2056 | return 0; | |
2057 | } | |
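/* Usage sketch (illustrative only, not part of rtlanal.c): asking whether a
   call explicitly uses a given hard register as an argument, via the USE
   entries in its CALL_INSN_FUNCTION_USAGE list.  The helper name
   call_uses_hard_reg_p is hypothetical.  */

static bool
call_uses_hard_reg_p (rtx_insn *call_insn, unsigned int regno)
{
  return CALL_P (call_insn) && find_regno_fusage (call_insn, USE, regno) != 0;
}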
ef15379a | 2058 | |
635aff97 | 2059 | \f |
9eb946de | 2060 | /* Return true if KIND is an integer REG_NOTE. */ |
2061 | ||
2062 | static bool | |
2063 | int_reg_note_p (enum reg_note kind) | |
2064 | { | |
2065 | return kind == REG_BR_PROB; | |
2066 | } | |
2067 | ||
5859ee98 | 2068 | /* Allocate a register note with kind KIND and datum DATUM. LIST is |
2069 | stored as the pointer to the next register note. */ | |
a1ddb869 | 2070 | |
5859ee98 | 2071 | rtx |
2072 | alloc_reg_note (enum reg_note kind, rtx datum, rtx list) | |
a1ddb869 | 2073 | { |
2074 | rtx note; | |
2075 | ||
9eb946de | 2076 | gcc_checking_assert (!int_reg_note_p (kind)); |
a1ddb869 | 2077 | switch (kind) |
2078 | { | |
2079 | case REG_CC_SETTER: | |
2080 | case REG_CC_USER: | |
2081 | case REG_LABEL_TARGET: | |
2082 | case REG_LABEL_OPERAND: | |
4c0315d0 | 2083 | case REG_TM: |
a1ddb869 | 2084 | /* These types of register notes use an INSN_LIST rather than an |
2085 | EXPR_LIST, so that copying is done right and dumps look | |
2086 | better. */ | |
5859ee98 | 2087 | note = alloc_INSN_LIST (datum, list); |
a1ddb869 | 2088 | PUT_REG_NOTE_KIND (note, kind); |
2089 | break; | |
2090 | ||
2091 | default: | |
5859ee98 | 2092 | note = alloc_EXPR_LIST (kind, datum, list); |
a1ddb869 | 2093 | break; |
2094 | } | |
2095 | ||
5859ee98 | 2096 | return note; |
2097 | } | |
2098 | ||
2099 | /* Add register note with kind KIND and datum DATUM to INSN. */ | |
2100 | ||
2101 | void | |
2102 | add_reg_note (rtx insn, enum reg_note kind, rtx datum) | |
2103 | { | |
2104 | REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn)); | |
a1ddb869 | 2105 | } |
2106 | ||
9eb946de | 2107 | /* Add an integer register note with kind KIND and datum DATUM to INSN. */ |
2108 | ||
2109 | void | |
2110 | add_int_reg_note (rtx insn, enum reg_note kind, int datum) | |
2111 | { | |
2112 | gcc_checking_assert (int_reg_note_p (kind)); | |
3754d046 | 2113 | REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind, |
9eb946de | 2114 | datum, REG_NOTES (insn)); |
2115 | } | |
2116 | ||
2117 | /* Add a register note like NOTE to INSN. */ | |
2118 | ||
2119 | void | |
ca336a81 | 2120 | add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note) |
9eb946de | 2121 | { |
2122 | if (GET_CODE (note) == INT_LIST) | |
2123 | add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0)); | |
2124 | else | |
2125 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
2126 | } | |
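/* Usage sketch (illustrative only, not part of rtlanal.c): recording that the
   value computed by a single_set insn is equal to a constant, the way
   expansion code typically attaches REG_EQUAL notes.  The helper name
   record_equal_constant is hypothetical.  */

static void
record_equal_constant (rtx_insn *insn, rtx cst)
{
  if (single_set (insn) != NULL_RTX && CONSTANT_P (cst))
    add_reg_note (insn, REG_EQUAL, cst);
}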
2127 | ||
635aff97 | 2128 | /* Remove register note NOTE from the REG_NOTES of INSN. */ |
2129 | ||
2130 | void | |
dd9b9fc5 | 2131 | remove_note (rtx insn, const_rtx note) |
635aff97 | 2132 | { |
19cb6b50 | 2133 | rtx link; |
635aff97 | 2134 | |
def93098 | 2135 | if (note == NULL_RTX) |
2136 | return; | |
2137 | ||
635aff97 | 2138 | if (REG_NOTES (insn) == note) |
3072d30e | 2139 | REG_NOTES (insn) = XEXP (note, 1); |
2140 | else | |
2141 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
2142 | if (XEXP (link, 1) == note) | |
2143 | { | |
2144 | XEXP (link, 1) = XEXP (note, 1); | |
2145 | break; | |
2146 | } | |
2147 | ||
2148 | switch (REG_NOTE_KIND (note)) | |
635aff97 | 2149 | { |
3072d30e | 2150 | case REG_EQUAL: |
2151 | case REG_EQUIV: | |
e149ca56 | 2152 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
3072d30e | 2153 | break; |
2154 | default: | |
2155 | break; | |
635aff97 | 2156 | } |
635aff97 | 2157 | } |
13d60e7c | 2158 | |
f16feee2 | 2159 | /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */ |
2160 | ||
2161 | void | |
6dbed5c7 | 2162 | remove_reg_equal_equiv_notes (rtx_insn *insn) |
f16feee2 | 2163 | { |
2164 | rtx *loc; | |
2165 | ||
2166 | loc = ®_NOTES (insn); | |
2167 | while (*loc) | |
2168 | { | |
2169 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
2170 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
2171 | *loc = XEXP (*loc, 1); | |
2172 | else | |
2173 | loc = &XEXP (*loc, 1); | |
2174 | } | |
2175 | } | |
09669349 | 2176 | |
2177 | /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */ | |
2178 | ||
2179 | void | |
2180 | remove_reg_equal_equiv_notes_for_regno (unsigned int regno) | |
2181 | { | |
2182 | df_ref eq_use; | |
2183 | ||
2184 | if (!df) | |
2185 | return; | |
2186 | ||
2187 | /* This loop is a little tricky. We cannot just go down the chain because | |
2188 | it is being modified by some actions in the loop. So we just iterate | |
2189 | over the head. We plan to drain the list anyway. */ | |
2190 | while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL) | |
2191 | { | |
200c2a8f | 2192 | rtx_insn *insn = DF_REF_INSN (eq_use); |
09669349 | 2193 | rtx note = find_reg_equal_equiv_note (insn); |
2194 | ||
2195 | /* This assert is generally triggered when someone deletes a REG_EQUAL | |
2196 | or REG_EQUIV note by hacking the list manually rather than calling | |
2197 | remove_note. */ | |
2198 | gcc_assert (note); | |
2199 | ||
2200 | remove_note (insn, note); | |
2201 | } | |
2202 | } | |
f16feee2 | 2203 | |
5cc577b6 | 2204 | /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and | |
2205 | return 1 if it is found. A simple equality test is used to determine if | |
2206 | NODE matches. */ | |
2207 | ||
7a680a39 | 2208 | bool |
2209 | in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node) | |
5cc577b6 | 2210 | { |
dd9b9fc5 | 2211 | const_rtx x; |
5cc577b6 | 2212 | |
2213 | for (x = listp; x; x = XEXP (x, 1)) | |
2214 | if (node == XEXP (x, 0)) | |
7a680a39 | 2215 | return true; |
5cc577b6 | 2216 | |
7a680a39 | 2217 | return false; |
5cc577b6 | 2218 | } |
2219 | ||
badb6da9 | 2220 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
2221 | remove that entry from the list if it is found. | |
13d60e7c | 2222 | |
badb6da9 | 2223 | A simple equality test is used to determine if NODE matches. */ |
13d60e7c | 2224 | |
2225 | void | |
6e16e157 | 2226 | remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp) |
13d60e7c | 2227 | { |
6e16e157 | 2228 | rtx_expr_list *temp = *listp; |
13d60e7c | 2229 | rtx prev = NULL_RTX; |
2230 | ||
2231 | while (temp) | |
2232 | { | |
6e16e157 | 2233 | if (node == temp->element ()) |
13d60e7c | 2234 | { |
2235 | /* Splice the node out of the list. */ | |
2236 | if (prev) | |
6e16e157 | 2237 | XEXP (prev, 1) = temp->next (); |
13d60e7c | 2238 | else |
6e16e157 | 2239 | *listp = temp->next (); |
13d60e7c | 2240 | |
2241 | return; | |
2242 | } | |
badb6da9 | 2243 | |
2244 | prev = temp; | |
6e16e157 | 2245 | temp = temp->next (); |
13d60e7c | 2246 | } |
2247 | } | |
a4de1c23 | 2248 | |
2249 | /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and | |
2250 | remove that entry from the list if it is found. | |
2251 | ||
2252 | A simple equality test is used to determine if NODE matches. */ | |
2253 | ||
2254 | void | |
2255 | remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp) | |
2256 | { | |
2257 | rtx_insn_list *temp = *listp; | |
2258 | rtx prev = NULL; | |
2259 | ||
2260 | while (temp) | |
2261 | { | |
2262 | if (node == temp->insn ()) | |
2263 | { | |
2264 | /* Splice the node out of the list. */ | |
2265 | if (prev) | |
2266 | XEXP (prev, 1) = temp->next (); | |
2267 | else | |
2268 | *listp = temp->next (); | |
2269 | ||
2270 | return; | |
2271 | } | |
2272 | ||
2273 | prev = temp; | |
2274 | temp = temp->next (); | |
2275 | } | |
2276 | } | |
635aff97 | 2277 | \f |
ea275ef9 | 2278 | /* Nonzero if X contains any volatile instructions. These are instructions |
2279 | which may cause unpredictable machine state, and thus no | |
e12b44a3 | 2280 | instructions or register uses should be moved or combined across them. |
2281 | This includes only volatile asms and UNSPEC_VOLATILE instructions. */ | |
ea275ef9 | 2282 | |
2283 | int | |
dd9b9fc5 | 2284 | volatile_insn_p (const_rtx x) |
ea275ef9 | 2285 | { |
dd9b9fc5 | 2286 | const RTX_CODE code = GET_CODE (x); |
ea275ef9 | 2287 | switch (code) |
2288 | { | |
2289 | case LABEL_REF: | |
2290 | case SYMBOL_REF: | |
ea275ef9 | 2291 | case CONST: |
0349edce | 2292 | CASE_CONST_ANY: |
ea275ef9 | 2293 | case CC0: |
2294 | case PC: | |
2295 | case REG: | |
2296 | case SCRATCH: | |
2297 | case CLOBBER: | |
ea275ef9 | 2298 | case ADDR_VEC: |
2299 | case ADDR_DIFF_VEC: | |
2300 | case CALL: | |
2301 | case MEM: | |
2302 | return 0; | |
2303 | ||
2304 | case UNSPEC_VOLATILE: | |
ea275ef9 | 2305 | return 1; |
2306 | ||
c52051b7 | 2307 | case ASM_INPUT: |
ea275ef9 | 2308 | case ASM_OPERANDS: |
2309 | if (MEM_VOLATILE_P (x)) | |
2310 | return 1; | |
0dbd1c74 | 2311 | |
2312 | default: | |
2313 | break; | |
ea275ef9 | 2314 | } |
2315 | ||
2316 | /* Recursively scan the operands of this expression. */ | |
2317 | ||
2318 | { | |
dd9b9fc5 | 2319 | const char *const fmt = GET_RTX_FORMAT (code); |
19cb6b50 | 2320 | int i; |
2617fe26 | 2321 | |
ea275ef9 | 2322 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2323 | { | |
2324 | if (fmt[i] == 'e') | |
2325 | { | |
c1dadb43 | 2326 | if (volatile_insn_p (XEXP (x, i))) |
ea275ef9 | 2327 | return 1; |
2328 | } | |
1bd8ca86 | 2329 | else if (fmt[i] == 'E') |
ea275ef9 | 2330 | { |
19cb6b50 | 2331 | int j; |
ea275ef9 | 2332 | for (j = 0; j < XVECLEN (x, i); j++) |
c1dadb43 | 2333 | if (volatile_insn_p (XVECEXP (x, i, j))) |
ea275ef9 | 2334 | return 1; |
2335 | } | |
2336 | } | |
2337 | } | |
2338 | return 0; | |
2339 | } | |
2340 | ||
635aff97 | 2341 | /* Nonzero if X contains any volatile memory references |
3384a30e | 2342 | UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
635aff97 | 2343 | |
2344 | int | |
dd9b9fc5 | 2345 | volatile_refs_p (const_rtx x) |
635aff97 | 2346 | { |
dd9b9fc5 | 2347 | const RTX_CODE code = GET_CODE (x); |
635aff97 | 2348 | switch (code) |
2349 | { | |
2350 | case LABEL_REF: | |
2351 | case SYMBOL_REF: | |
635aff97 | 2352 | case CONST: |
0349edce | 2353 | CASE_CONST_ANY: |
635aff97 | 2354 | case CC0: |
2355 | case PC: | |
2356 | case REG: | |
2357 | case SCRATCH: | |
2358 | case CLOBBER: | |
635aff97 | 2359 | case ADDR_VEC: |
2360 | case ADDR_DIFF_VEC: | |
2361 | return 0; | |
2362 | ||
3384a30e | 2363 | case UNSPEC_VOLATILE: |
635aff97 | 2364 | return 1; |
2365 | ||
2366 | case MEM: | |
c52051b7 | 2367 | case ASM_INPUT: |
635aff97 | 2368 | case ASM_OPERANDS: |
2369 | if (MEM_VOLATILE_P (x)) | |
2370 | return 1; | |
0dbd1c74 | 2371 | |
2372 | default: | |
2373 | break; | |
635aff97 | 2374 | } |
2375 | ||
2376 | /* Recursively scan the operands of this expression. */ | |
2377 | ||
2378 | { | |
dd9b9fc5 | 2379 | const char *const fmt = GET_RTX_FORMAT (code); |
19cb6b50 | 2380 | int i; |
2617fe26 | 2381 | |
635aff97 | 2382 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2383 | { | |
2384 | if (fmt[i] == 'e') | |
2385 | { | |
2386 | if (volatile_refs_p (XEXP (x, i))) | |
2387 | return 1; | |
2388 | } | |
1bd8ca86 | 2389 | else if (fmt[i] == 'E') |
635aff97 | 2390 | { |
19cb6b50 | 2391 | int j; |
635aff97 | 2392 | for (j = 0; j < XVECLEN (x, i); j++) |
2393 | if (volatile_refs_p (XVECEXP (x, i, j))) | |
2394 | return 1; | |
2395 | } | |
2396 | } | |
2397 | } | |
2398 | return 0; | |
2399 | } | |
2400 | ||
2401 | /* Similar to above, except that it also rejects register pre- and post- | |
2402 | incrementing. */ | |
2403 | ||
2404 | int | |
dd9b9fc5 | 2405 | side_effects_p (const_rtx x) |
635aff97 | 2406 | { |
dd9b9fc5 | 2407 | const RTX_CODE code = GET_CODE (x); |
635aff97 | 2408 | switch (code) |
2409 | { | |
2410 | case LABEL_REF: | |
2411 | case SYMBOL_REF: | |
635aff97 | 2412 | case CONST: |
0349edce | 2413 | CASE_CONST_ANY: |
635aff97 | 2414 | case CC0: |
2415 | case PC: | |
2416 | case REG: | |
2417 | case SCRATCH: | |
635aff97 | 2418 | case ADDR_VEC: |
2419 | case ADDR_DIFF_VEC: | |
9845d120 | 2420 | case VAR_LOCATION: |
635aff97 | 2421 | return 0; |
2422 | ||
2423 | case CLOBBER: | |
2424 | /* Reject CLOBBER with a non-VOID mode. These are made by combine.c | |
2425 | when some combination can't be done. If we see one, don't think | |
2426 | that we can simplify the expression. */ | |
2427 | return (GET_MODE (x) != VOIDmode); | |
2428 | ||
2429 | case PRE_INC: | |
2430 | case PRE_DEC: | |
2431 | case POST_INC: | |
2432 | case POST_DEC: | |
a3da8215 | 2433 | case PRE_MODIFY: |
2434 | case POST_MODIFY: | |
635aff97 | 2435 | case CALL: |
3384a30e | 2436 | case UNSPEC_VOLATILE: |
635aff97 | 2437 | return 1; |
2438 | ||
2439 | case MEM: | |
c52051b7 | 2440 | case ASM_INPUT: |
635aff97 | 2441 | case ASM_OPERANDS: |
2442 | if (MEM_VOLATILE_P (x)) | |
2443 | return 1; | |
0dbd1c74 | 2444 | |
2445 | default: | |
2446 | break; | |
635aff97 | 2447 | } |
2448 | ||
2449 | /* Recursively scan the operands of this expression. */ | |
2450 | ||
2451 | { | |
19cb6b50 | 2452 | const char *fmt = GET_RTX_FORMAT (code); |
2453 | int i; | |
2617fe26 | 2454 | |
635aff97 | 2455 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2456 | { | |
2457 | if (fmt[i] == 'e') | |
2458 | { | |
2459 | if (side_effects_p (XEXP (x, i))) | |
2460 | return 1; | |
2461 | } | |
1bd8ca86 | 2462 | else if (fmt[i] == 'E') |
635aff97 | 2463 | { |
19cb6b50 | 2464 | int j; |
635aff97 | 2465 | for (j = 0; j < XVECLEN (x, i); j++) |
2466 | if (side_effects_p (XVECEXP (x, i, j))) | |
2467 | return 1; | |
2468 | } | |
2469 | } | |
2470 | } | |
2471 | return 0; | |
2472 | } | |
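/* Usage sketch (illustrative only, not part of rtlanal.c): a SET whose
   destination is known to be dead can only be deleted if evaluating its
   source has no side effects; this is the kind of test dead-code style passes
   make.  The helper name dead_set_deletable_p is hypothetical.  */

static bool
dead_set_deletable_p (rtx set)
{
  return GET_CODE (set) == SET && !side_effects_p (SET_SRC (set));
}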
2473 | \f | |
5ed26a34 | 2474 | /* Return nonzero if evaluating rtx X might cause a trap. |
0eee494e | 2475 | FLAGS controls how to consider MEMs.  A nonzero value means the context | |
2476 | of the access may have changed from the original, such that the | |
2477 | address may have become invalid. */ | |
635aff97 | 2478 | |
77ad8e5a | 2479 | int |
dd9b9fc5 | 2480 | may_trap_p_1 (const_rtx x, unsigned flags) |
635aff97 | 2481 | { |
2482 | int i; | |
2483 | enum rtx_code code; | |
d2ca078f | 2484 | const char *fmt; |
0eee494e | 2485 | |
2486 | /* We make no distinction currently, but this function is part of | |
2487 | the internal target-hooks ABI so we keep the parameter as | |
2488 | "unsigned flags". */ | |
2489 | bool code_changed = flags != 0; | |
635aff97 | 2490 | |
2491 | if (x == 0) | |
2492 | return 0; | |
2493 | code = GET_CODE (x); | |
2494 | switch (code) | |
2495 | { | |
2496 | /* Handle these cases quickly. */ | |
0349edce | 2497 | CASE_CONST_ANY: |
635aff97 | 2498 | case SYMBOL_REF: |
2499 | case LABEL_REF: | |
2500 | case CONST: | |
2501 | case PC: | |
2502 | case CC0: | |
2503 | case REG: | |
2504 | case SCRATCH: | |
2505 | return 0; | |
2506 | ||
77ad8e5a | 2507 | case UNSPEC: |
77ad8e5a | 2508 | return targetm.unspec_may_trap_p (x, flags); |
2509 | ||
bcbfcebe | 2510 | case UNSPEC_VOLATILE: |
77ad8e5a | 2511 | case ASM_INPUT: |
635aff97 | 2512 | case TRAP_IF: |
2513 | return 1; | |
2514 | ||
d18a3f6d | 2515 | case ASM_OPERANDS: |
2516 | return MEM_VOLATILE_P (x); | |
2517 | ||
635aff97 | 2518 | /* Memory ref can trap unless it's a static var or a stack slot. */ |
2519 | case MEM: | |
42982f3e | 2520 | /* Recognize the specific pattern of stack checking probes. */ | |
2521 | if (flag_stack_check | |
2522 | && MEM_VOLATILE_P (x) | |
2523 | && XEXP (x, 0) == stack_pointer_rtx) | |
2524 | return 1; | |
5ed26a34 | 2525 | if (/* MEM_NOTRAP_P only relates to the actual position of the memory |
0eee494e | 2526 | reference; moving it out of context such as when moving code |
2527 | when optimizing, might cause its address to become invalid. */ | |
2528 | code_changed | |
2529 | || !MEM_NOTRAP_P (x)) | |
2530 | { | |
5b2a69fa | 2531 | HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0; |
0eee494e | 2532 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, |
2533 | GET_MODE (x), code_changed); | |
2534 | } | |
2535 | ||
2536 | return 0; | |
635aff97 | 2537 | |
2538 | /* Division by a non-constant might trap. */ | |
2539 | case DIV: | |
2540 | case MOD: | |
2541 | case UDIV: | |
2542 | case UMOD: | |
fe994837 | 2543 | if (HONOR_SNANS (x)) |
0a8176f3 | 2544 | return 1; |
cee7491d | 2545 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x))) |
8e97b017 | 2546 | return flag_trapping_math; |
2547 | if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx)) | |
635aff97 | 2548 | return 1; |
0dbd1c74 | 2549 | break; |
2550 | ||
4c3aec45 | 2551 | case EXPR_LIST: |
2552 | /* An EXPR_LIST is used to represent a function call. This | |
2553 | certainly may trap. */ | |
2554 | return 1; | |
0dbd1c74 | 2555 | |
27ea6d28 | 2556 | case GE: |
2557 | case GT: | |
2558 | case LE: | |
2559 | case LT: | |
f1278f7e | 2560 | case LTGT: |
51ba663d | 2561 | case COMPARE: |
27ea6d28 | 2562 | /* Some floating point comparisons may trap. */ |
350b17ef | 2563 | if (!flag_trapping_math) |
2564 | break; | |
27ea6d28 | 2565 | /* ??? There is no machine independent way to check for tests that trap |
2566 | when COMPARE is used, though many targets do make this distinction. | |
2567 | For instance, sparc uses CCFPE for compares which generate exceptions | |
2568 | and CCFP for compares which do not generate exceptions. */ | |
93633022 | 2569 | if (HONOR_NANS (x)) |
51ba663d | 2570 | return 1; |
2571 | /* But often the compare has some CC mode, so check operand | |
2572 | modes as well. */ | |
93633022 | 2573 | if (HONOR_NANS (XEXP (x, 0)) |
2574 | || HONOR_NANS (XEXP (x, 1))) | |
0a8176f3 | 2575 | return 1; |
2576 | break; | |
2577 | ||
2578 | case EQ: | |
2579 | case NE: | |
fe994837 | 2580 | if (HONOR_SNANS (x)) |
0a8176f3 | 2581 | return 1; |
2582 | /* Often comparison is CC mode, so check operand modes. */ | |
fe994837 | 2583 | if (HONOR_SNANS (XEXP (x, 0)) |
2584 | || HONOR_SNANS (XEXP (x, 1))) | |
51ba663d | 2585 | return 1; |
2586 | break; | |
2587 | ||
d0a099f8 | 2588 | case FIX: |
2589 | /* Conversion of floating point might trap. */ | |
93633022 | 2590 | if (flag_trapping_math && HONOR_NANS (XEXP (x, 0))) |
d0a099f8 | 2591 | return 1; |
2592 | break; | |
2593 | ||
4f63c6d1 | 2594 | case NEG: |
2595 | case ABS: | |
f0fbc1cd | 2596 | case SUBREG: |
4f63c6d1 | 2597 | /* These operations don't trap even with floating point. */ |
2598 | break; | |
2599 | ||
635aff97 | 2600 | default: |
2601 | /* Any floating arithmetic may trap. */ | |
bcbfcebe | 2602 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math) |
635aff97 | 2603 | return 1; |
2604 | } | |
2605 | ||
2606 | fmt = GET_RTX_FORMAT (code); | |
2607 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2608 | { | |
2609 | if (fmt[i] == 'e') | |
2610 | { | |
5ed26a34 | 2611 | if (may_trap_p_1 (XEXP (x, i), flags)) |
635aff97 | 2612 | return 1; |
2613 | } | |
2614 | else if (fmt[i] == 'E') | |
2615 | { | |
19cb6b50 | 2616 | int j; |
635aff97 | 2617 | for (j = 0; j < XVECLEN (x, i); j++) |
5ed26a34 | 2618 | if (may_trap_p_1 (XVECEXP (x, i, j), flags)) |
635aff97 | 2619 | return 1; |
2620 | } | |
2621 | } | |
2622 | return 0; | |
2623 | } | |
1aecae7f | 2624 | |
2625 | /* Return nonzero if evaluating rtx X might cause a trap. */ | |
2626 | ||
2627 | int | |
dd9b9fc5 | 2628 | may_trap_p (const_rtx x) |
1aecae7f | 2629 | { |
5ed26a34 | 2630 | return may_trap_p_1 (x, 0); |
2631 | } | |
2632 | ||
334ec2d8 | 2633 | /* Same as above, but additionally return nonzero if evaluating rtx X might |
1aecae7f | 2634 | cause a fault.  We define a fault for the purpose of this function as an | |
2635 | erroneous execution condition that cannot be encountered during the normal | |
2636 | execution of a valid program; the typical example is an unaligned memory | |
2637 | access on a strict alignment machine. The compiler guarantees that it | |
2638 | doesn't generate code that will fault from a valid program, but this | |
2639 | guarantee doesn't mean anything for individual instructions. Consider | |
2640 | the following example: | |
2641 | ||
2642 | struct S { int d; union { char *cp; int *ip; }; }; | |
2643 | ||
2644 | int foo(struct S *s) | |
2645 | { | |
2646 | if (s->d == 1) | |
2647 | return *s->ip; | |
2648 | else | |
2649 | return *s->cp; | |
2650 | } | |
2651 | ||
2652 | on a strict alignment machine. In a valid program, foo will never be | |
2653 | invoked on a structure for which d is equal to 1 and the underlying | |
2654 | unique field of the union not aligned on a 4-byte boundary, but the | |
2655 | expression *s->ip might cause a fault if considered individually. | |
2656 | ||
2657 | At the RTL level, potentially problematic expressions will almost always | |
2658 | verify may_trap_p; for example, the above dereference can be emitted as | |
2659 | (mem:SI (reg:P)) and this expression is may_trap_p for a generic register. | |
2660 | However, suppose that foo is inlined in a caller that causes s->cp to | |
2661 | point to a local character variable and guarantees that s->d is not set | |
2662 | to 1; foo may have been effectively translated into pseudo-RTL as: | |
2663 | ||
2664 | if ((reg:SI) == 1) | |
2665 | (set (reg:SI) (mem:SI (%fp - 7))) | |
2666 | else | |
2667 | (set (reg:QI) (mem:QI (%fp - 7))) | |
2668 | ||
2669 | Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a | |
2670 | memory reference to a stack slot, but it will certainly cause a fault | |
2671 | on a strict alignment machine. */ | |
2672 | ||
2673 | int | |
dd9b9fc5 | 2674 | may_trap_or_fault_p (const_rtx x) |
1aecae7f | 2675 | { |
0eee494e | 2676 | return may_trap_p_1 (x, 1); |
1aecae7f | 2677 | } |
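/* Usage sketch (illustrative only, not part of rtlanal.c): the usual test for
   whether an expression can be evaluated speculatively (for instance, hoisted
   above a conditional branch) is that it neither traps nor has side effects;
   may_trap_or_fault_p is the stricter check to use when the expression is
   moved out of its original context.  The helper name safe_to_speculate_p is
   hypothetical.  */

static bool
safe_to_speculate_p (const_rtx x)
{
  return !may_trap_or_fault_p (x) && !side_effects_p (x);
}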
635aff97 | 2678 | \f |
2679 | /* Return nonzero if X contains a comparison that is not either EQ or NE, | |
2680 | i.e., an inequality. */ | |
2681 | ||
2682 | int | |
dd9b9fc5 | 2683 | inequality_comparisons_p (const_rtx x) |
635aff97 | 2684 | { |
19cb6b50 | 2685 | const char *fmt; |
2686 | int len, i; | |
dd9b9fc5 | 2687 | const enum rtx_code code = GET_CODE (x); |
635aff97 | 2688 | |
2689 | switch (code) | |
2690 | { | |
2691 | case REG: | |
2692 | case SCRATCH: | |
2693 | case PC: | |
2694 | case CC0: | |
0349edce | 2695 | CASE_CONST_ANY: |
635aff97 | 2696 | case CONST: |
2697 | case LABEL_REF: | |
2698 | case SYMBOL_REF: | |
2699 | return 0; | |
2700 | ||
2701 | case LT: | |
2702 | case LTU: | |
2703 | case GT: | |
2704 | case GTU: | |
2705 | case LE: | |
2706 | case LEU: | |
2707 | case GE: | |
2708 | case GEU: | |
2709 | return 1; | |
2617fe26 | 2710 | |
0dbd1c74 | 2711 | default: |
2712 | break; | |
635aff97 | 2713 | } |
2714 | ||
2715 | len = GET_RTX_LENGTH (code); | |
2716 | fmt = GET_RTX_FORMAT (code); | |
2717 | ||
2718 | for (i = 0; i < len; i++) | |
2719 | { | |
2720 | if (fmt[i] == 'e') | |
2721 | { | |
2722 | if (inequality_comparisons_p (XEXP (x, i))) | |
2723 | return 1; | |
2724 | } | |
2725 | else if (fmt[i] == 'E') | |
2726 | { | |
19cb6b50 | 2727 | int j; |
635aff97 | 2728 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2729 | if (inequality_comparisons_p (XVECEXP (x, i, j))) | |
2730 | return 1; | |
2731 | } | |
2732 | } | |
2617fe26 | 2733 | |
635aff97 | 2734 | return 0; |
2735 | } | |
2736 | \f | |
0a20afb5 | 2737 | /* Replace any occurrence of FROM in X with TO. The function does |
2738 | not enter into CONST_DOUBLE for the replace. | |
635aff97 | 2739 | |
2740 | Note that copying is not done so X must not be shared unless all copies | |
2741 | are to be modified. */ | |
2742 | ||
2743 | rtx | |
3ad4992f | 2744 | replace_rtx (rtx x, rtx from, rtx to) |
635aff97 | 2745 | { |
19cb6b50 | 2746 | int i, j; |
2747 | const char *fmt; | |
635aff97 | 2748 | |
2749 | if (x == from) | |
2750 | return to; | |
2751 | ||
2752 | /* Allow this function to make replacements in EXPR_LISTs. */ | |
2753 | if (x == 0) | |
2754 | return 0; | |
2755 | ||
11896b36 | 2756 | if (GET_CODE (x) == SUBREG) |
2757 | { | |
47cfb7f4 | 2758 | rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to); |
11896b36 | 2759 | |
971ba038 | 2760 | if (CONST_INT_P (new_rtx)) |
11896b36 | 2761 | { |
47cfb7f4 | 2762 | x = simplify_subreg (GET_MODE (x), new_rtx, |
11896b36 | 2763 | GET_MODE (SUBREG_REG (x)), |
2764 | SUBREG_BYTE (x)); | |
04e579b6 | 2765 | gcc_assert (x); |
11896b36 | 2766 | } |
2767 | else | |
47cfb7f4 | 2768 | SUBREG_REG (x) = new_rtx; |
11896b36 | 2769 | |
2770 | return x; | |
2771 | } | |
2772 | else if (GET_CODE (x) == ZERO_EXTEND) | |
2773 | { | |
47cfb7f4 | 2774 | rtx new_rtx = replace_rtx (XEXP (x, 0), from, to); |
11896b36 | 2775 | |
971ba038 | 2776 | if (CONST_INT_P (new_rtx)) |
11896b36 | 2777 | { |
2778 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
47cfb7f4 | 2779 | new_rtx, GET_MODE (XEXP (x, 0))); |
04e579b6 | 2780 | gcc_assert (x); |
11896b36 | 2781 | } |
2782 | else | |
47cfb7f4 | 2783 | XEXP (x, 0) = new_rtx; |
11896b36 | 2784 | |
2785 | return x; | |
2786 | } | |
2787 | ||
635aff97 | 2788 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
2789 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2790 | { | |
2791 | if (fmt[i] == 'e') | |
2792 | XEXP (x, i) = replace_rtx (XEXP (x, i), from, to); | |
2793 | else if (fmt[i] == 'E') | |
2794 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2795 | XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to); | |
2796 | } | |
2797 | ||
2798 | return x; | |
2617fe26 | 2799 | } |
635aff97 | 2800 | \f |
956816a2 | 2801 | /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track |
2802 | the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */ | |
f2756aab | 2803 | |
956816a2 | 2804 | void |
2805 | replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses) | |
f2756aab | 2806 | { |
956816a2 | 2807 | /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */ |
2808 | rtx x = *loc; | |
2809 | if (JUMP_TABLE_DATA_P (x)) | |
cda612f5 | 2810 | { |
956816a2 | 2811 | x = PATTERN (x); |
2812 | rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC); | |
2813 | int len = GET_NUM_ELEM (vec); | |
2814 | for (int i = 0; i < len; ++i) | |
cda612f5 | 2815 | { |
956816a2 | 2816 | rtx ref = RTVEC_ELT (vec, i); |
2817 | if (XEXP (ref, 0) == old_label) | |
2818 | { | |
2819 | XEXP (ref, 0) = new_label; | |
2820 | if (update_label_nuses) | |
2821 | { | |
2822 | ++LABEL_NUSES (new_label); | |
2823 | --LABEL_NUSES (old_label); | |
2824 | } | |
2825 | } | |
cda612f5 | 2826 | } |
956816a2 | 2827 | return; |
cda612f5 | 2828 | } |
2829 | ||
f2756aab | 2830 | /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL |
956816a2 | 2831 | field. This is not handled by the iterator because it doesn't |
f2756aab | 2832 | handle unprinted ('0') fields. */ |
956816a2 | 2833 | if (JUMP_P (x) && JUMP_LABEL (x) == old_label) |
2834 | JUMP_LABEL (x) = new_label; | |
f2756aab | 2835 | |
956816a2 | 2836 | subrtx_ptr_iterator::array_type array; |
2837 | FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL) | |
cda612f5 | 2838 | { |
956816a2 | 2839 | rtx *loc = *iter; |
2840 | if (rtx x = *loc) | |
cda612f5 | 2841 | { |
956816a2 | 2842 | if (GET_CODE (x) == SYMBOL_REF |
2843 | && CONSTANT_POOL_ADDRESS_P (x)) | |
2844 | { | |
2845 | rtx c = get_pool_constant (x); | |
2846 | if (rtx_referenced_p (old_label, c)) | |
2847 | { | |
2848 | /* Create a copy of constant C; replace the label inside | |
2849 | but do not update LABEL_NUSES because uses in constant pool | |
2850 | are not counted. */ | |
2851 | rtx new_c = copy_rtx (c); | |
2852 | replace_label (&new_c, old_label, new_label, false); | |
2853 | ||
2854 | /* Add the new constant NEW_C to constant pool and replace | |
2855 | the old reference to constant by new reference. */ | |
2856 | rtx new_mem = force_const_mem (get_pool_mode (x), new_c); | |
2857 | *loc = replace_rtx (x, x, XEXP (new_mem, 0)); | |
2858 | } | |
2859 | } | |
2860 | ||
2861 | if ((GET_CODE (x) == LABEL_REF | |
2862 | || GET_CODE (x) == INSN_LIST) | |
2863 | && XEXP (x, 0) == old_label) | |
2864 | { | |
2865 | XEXP (x, 0) = new_label; | |
2866 | if (update_label_nuses) | |
2867 | { | |
2868 | ++LABEL_NUSES (new_label); | |
2869 | --LABEL_NUSES (old_label); | |
2870 | } | |
2871 | } | |
cda612f5 | 2872 | } |
cda612f5 | 2873 | } |
956816a2 | 2874 | } |
f2756aab | 2875 | |
956816a2 | 2876 | void |
2877 | replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label, | |
2878 | bool update_label_nuses) | |
2879 | { | |
2880 | rtx insn_as_rtx = insn; | |
2881 | replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses); | |
2882 | gcc_checking_assert (insn_as_rtx == insn); | |
f2756aab | 2883 | } |
2884 | ||
db184797 | 2885 | /* Return true if X is referenced in BODY. */ |
f2756aab | 2886 | |
db184797 | 2887 | bool |
2888 | rtx_referenced_p (const_rtx x, const_rtx body) | |
f2756aab | 2889 | { |
db184797 | 2890 | subrtx_iterator::array_type array; |
2891 | FOR_EACH_SUBRTX (iter, array, body, ALL) | |
2892 | if (const_rtx y = *iter) | |
2893 | { | |
2894 | /* Check if a label_ref Y refers to label X. */ | |
b49f2e4b | 2895 | if (GET_CODE (y) == LABEL_REF |
2896 | && LABEL_P (x) | |
2897 | && LABEL_REF_LABEL (y) == x) | |
db184797 | 2898 | return true; |
f2756aab | 2899 | |
db184797 | 2900 | if (rtx_equal_p (x, y)) |
2901 | return true; | |
f2756aab | 2902 | |
db184797 | 2903 | /* If Y is a reference to a pool constant, traverse the constant. */ | |
2904 | if (GET_CODE (y) == SYMBOL_REF | |
2905 | && CONSTANT_POOL_ADDRESS_P (y)) | |
2906 | iter.substitute (get_pool_constant (y)); | |
2907 | } | |
2908 | return false; | |
f2756aab | 2909 | } |
2910 | ||
afff715a | 2911 | /* If INSN is a tablejump, return true and store the label (which precedes | |
2912 | the jump table) in *LABELP and the jump table in *TABLEP.  LABELP and TABLEP may be NULL. */ | |
f2756aab | 2913 | |
2914 | bool | |
28fbb2b5 | 2915 | tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep) |
f2756aab | 2916 | { |
f9a00e9e | 2917 | rtx label; |
2918 | rtx_insn *table; | |
afff715a | 2919 | |
4115ac36 | 2920 | if (!JUMP_P (insn)) |
2921 | return false; | |
2922 | ||
2923 | label = JUMP_LABEL (insn); | |
2924 | if (label != NULL_RTX && !ANY_RETURN_P (label) | |
91a55c11 | 2925 | && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX |
971ba038 | 2926 | && JUMP_TABLE_DATA_P (table)) |
f2756aab | 2927 | { |
afff715a | 2928 | if (labelp) |
2929 | *labelp = label; | |
2930 | if (tablep) | |
c86d86ff | 2931 | *tablep = as_a <rtx_jump_table_data *> (table); |
f2756aab | 2932 | return true; |
2933 | } | |
2934 | return false; | |
2935 | } | |
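/* Usage sketch (illustrative only, not part of rtlanal.c): once tablejump_p
   has identified the dispatch table, its PATTERN is an ADDR_VEC or
   ADDR_DIFF_VEC whose vector holds the case labels.  The helper name
   jump_table_entry_count is hypothetical.  */

static int
jump_table_entry_count (const rtx_insn *insn)
{
  rtx_jump_table_data *table;
  rtx body;

  if (!tablejump_p (insn, NULL, &table))
    return 0;
  body = PATTERN (table);
  /* The vector is element 0 of an ADDR_VEC, element 1 of an ADDR_DIFF_VEC.  */
  return XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
}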
2936 | ||
4e44a132 | 2937 | /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or |
2938 | constant that is not in the constant pool and not in the condition | |
2939 | of an IF_THEN_ELSE. */ | |
ca6d6e84 | 2940 | |
2941 | static int | |
dd9b9fc5 | 2942 | computed_jump_p_1 (const_rtx x) |
ca6d6e84 | 2943 | { |
dd9b9fc5 | 2944 | const enum rtx_code code = GET_CODE (x); |
ca6d6e84 | 2945 | int i, j; |
d2ca078f | 2946 | const char *fmt; |
ca6d6e84 | 2947 | |
2948 | switch (code) | |
2949 | { | |
ca6d6e84 | 2950 | case LABEL_REF: |
2951 | case PC: | |
2952 | return 0; | |
2953 | ||
4e44a132 | 2954 | case CONST: |
0349edce | 2955 | CASE_CONST_ANY: |
4e44a132 | 2956 | case SYMBOL_REF: |
ca6d6e84 | 2957 | case REG: |
2958 | return 1; | |
2959 | ||
2960 | case MEM: | |
2961 | return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
2962 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))); | |
2963 | ||
2964 | case IF_THEN_ELSE: | |
4e44a132 | 2965 | return (computed_jump_p_1 (XEXP (x, 1)) |
2966 | || computed_jump_p_1 (XEXP (x, 2))); | |
99c14947 | 2967 | |
2968 | default: | |
2969 | break; | |
ca6d6e84 | 2970 | } |
2971 | ||
2972 | fmt = GET_RTX_FORMAT (code); | |
2973 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2974 | { | |
2975 | if (fmt[i] == 'e' | |
4e44a132 | 2976 | && computed_jump_p_1 (XEXP (x, i))) |
ca6d6e84 | 2977 | return 1; |
2978 | ||
1bd8ca86 | 2979 | else if (fmt[i] == 'E') |
ca6d6e84 | 2980 | for (j = 0; j < XVECLEN (x, i); j++) |
4e44a132 | 2981 | if (computed_jump_p_1 (XVECEXP (x, i, j))) |
ca6d6e84 | 2982 | return 1; |
2983 | } | |
2984 | ||
2985 | return 0; | |
2986 | } | |
2987 | ||
2988 | /* Return nonzero if INSN is an indirect jump (aka computed jump). | |
2989 | ||
2990 | Tablejumps and casesi insns are not considered indirect jumps; | |
9588521d | 2991 | we can recognize them by a (use (label_ref)). */ |
ca6d6e84 | 2992 | |
2993 | int | |
181908bb | 2994 | computed_jump_p (const rtx_insn *insn) |
ca6d6e84 | 2995 | { |
2996 | int i; | |
6d7dc5b9 | 2997 | if (JUMP_P (insn)) |
ca6d6e84 | 2998 | { |
2999 | rtx pat = PATTERN (insn); | |
ca6d6e84 | 3000 | |
19d2fe05 | 3001 | /* If we have a JUMP_LABEL set, we're not a computed jump. */ |
3002 | if (JUMP_LABEL (insn) != NULL) | |
d3ff0f75 | 3003 | return 0; |
19d2fe05 | 3004 | |
3005 | if (GET_CODE (pat) == PARALLEL) | |
ca6d6e84 | 3006 | { |
3007 | int len = XVECLEN (pat, 0); | |
3008 | int has_use_labelref = 0; | |
3009 | ||
3010 | for (i = len - 1; i >= 0; i--) | |
3011 | if (GET_CODE (XVECEXP (pat, 0, i)) == USE | |
3012 | && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) | |
3013 | == LABEL_REF)) | |
b766ccbc | 3014 | { |
3015 | has_use_labelref = 1; | |
3016 | break; | |
3017 | } | |
ca6d6e84 | 3018 | |
3019 | if (! has_use_labelref) | |
3020 | for (i = len - 1; i >= 0; i--) | |
3021 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
3022 | && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx | |
4e44a132 | 3023 | && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i)))) |
ca6d6e84 | 3024 | return 1; |
3025 | } | |
3026 | else if (GET_CODE (pat) == SET | |
3027 | && SET_DEST (pat) == pc_rtx | |
4e44a132 | 3028 | && computed_jump_p_1 (SET_SRC (pat))) |
ca6d6e84 | 3029 | return 1; |
3030 | } | |
3031 | return 0; | |
3032 | } | |
fb8acade | 3033 | |
1f864115 | 3034 | \f |
3035 | ||
623ad592 | 3036 | /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of |
3037 | the equivalent add insn and pass the result to FN, using DATA as the | |
3038 | final argument. */ | |
1f864115 | 3039 | |
3040 | static int | |
623ad592 | 3041 | for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data) |
1f864115 | 3042 | { |
623ad592 | 3043 | rtx x = XEXP (mem, 0); |
1f864115 | 3044 | switch (GET_CODE (x)) |
3045 | { | |
3046 | case PRE_INC: | |
3047 | case POST_INC: | |
3048 | { | |
623ad592 | 3049 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
1f864115 | 3050 | rtx r1 = XEXP (x, 0); |
3051 | rtx c = gen_int_mode (size, GET_MODE (r1)); | |
623ad592 | 3052 | return fn (mem, x, r1, r1, c, data); |
1f864115 | 3053 | } |
3054 | ||
3055 | case PRE_DEC: | |
3056 | case POST_DEC: | |
3057 | { | |
623ad592 | 3058 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
1f864115 | 3059 | rtx r1 = XEXP (x, 0); |
3060 | rtx c = gen_int_mode (-size, GET_MODE (r1)); | |
623ad592 | 3061 | return fn (mem, x, r1, r1, c, data); |
1f864115 | 3062 | } |
3063 | ||
3064 | case PRE_MODIFY: | |
3065 | case POST_MODIFY: | |
3066 | { | |
3067 | rtx r1 = XEXP (x, 0); | |
3068 | rtx add = XEXP (x, 1); | |
623ad592 | 3069 | return fn (mem, x, r1, add, NULL, data); |
1f864115 | 3070 | } |
3071 | ||
3072 | default: | |
623ad592 | 3073 | gcc_unreachable (); |
1f864115 | 3074 | } |
3075 | } | |
3076 | ||
623ad592 | 3077 | /* Traverse X looking for MEMs that have autoinc addresses. | |
3078 | For each such autoinc operation found, call FN, passing it | |
1f864115 | 3079 | the innermost enclosing MEM, the operation itself, the RTX modified |
3080 | by the operation, two RTXs (the second may be NULL) that, once | |
3081 | added, represent the value to be held by the modified RTX | |
623ad592 | 3082 | afterwards, and DATA. FN is to return 0 to continue the |
3083 | traversal or any other value to have it returned to the caller of | |
1f864115 | 3084 | for_each_inc_dec. */ |
3085 | ||
3086 | int | |
623ad592 | 3087 | for_each_inc_dec (rtx x, |
1f864115 | 3088 | for_each_inc_dec_fn fn, |
623ad592 | 3089 | void *data) |
1f864115 | 3090 | { |
623ad592 | 3091 | subrtx_var_iterator::array_type array; |
3092 | FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST) | |
3093 | { | |
3094 | rtx mem = *iter; | |
3095 | if (mem | |
3096 | && MEM_P (mem) | |
3097 | && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC) | |
3098 | { | |
3099 | int res = for_each_inc_dec_find_inc_dec (mem, fn, data); | |
3100 | if (res != 0) | |
3101 | return res; | |
3102 | iter.skip_subrtxes (); | |
3103 | } | |
3104 | } | |
3105 | return 0; | |
1f864115 | 3106 | } |
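/* Usage sketch (illustrative only, not part of rtlanal.c): counting the
   auto-increment addresses in a pattern.  The callback's argument list
   mirrors the calls made by for_each_inc_dec_find_inc_dec above; all names
   here are hypothetical.  */

static int
count_autoinc_1 (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		 rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
		 rtx srcoff ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return 0;	/* Zero continues the traversal.  */
}

static int
count_autoinc (rtx pat)
{
  int count = 0;
  for_each_inc_dec (pat, count_autoinc_1, &count);
  return count;
}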
3107 | ||
3108 | \f | |
0919e10f | 3109 | /* Searches X for any reference to REGNO, returning the rtx of the |
3110 | reference found if any. Otherwise, returns NULL_RTX. */ | |
3111 | ||
3112 | rtx | |
3ad4992f | 3113 | regno_use_in (unsigned int regno, rtx x) |
0919e10f | 3114 | { |
19cb6b50 | 3115 | const char *fmt; |
0919e10f | 3116 | int i, j; |
3117 | rtx tem; | |
3118 | ||
8ad4c111 | 3119 | if (REG_P (x) && REGNO (x) == regno) |
0919e10f | 3120 | return x; |
3121 | ||
3122 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
3123 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
3124 | { | |
3125 | if (fmt[i] == 'e') | |
3126 | { | |
3127 | if ((tem = regno_use_in (regno, XEXP (x, i)))) | |
3128 | return tem; | |
3129 | } | |
3130 | else if (fmt[i] == 'E') | |
3131 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3132 | if ((tem = regno_use_in (regno , XVECEXP (x, i, j)))) | |
3133 | return tem; | |
3134 | } | |
3135 | ||
3136 | return NULL_RTX; | |
3137 | } | |
c4f0a530 | 3138 | |
09f800b9 | 3139 | /* Return a value indicating whether OP, an operand of a commutative |
3140 | operation, is preferred as the first or second operand. The higher | |
3141 | the value, the stronger the preference for being the first operand. | |
3142 | Negative values indicate a preference for the second operand (typically | |
3143 | constants and other simple objects); positive values for the first. */ | |
3144 | ||
f4ad60b7 | 3145 | int |
3ad4992f | 3146 | commutative_operand_precedence (rtx op) |
09f800b9 | 3147 | { |
b147a3b4 | 3148 | enum rtx_code code = GET_CODE (op); |
48e1416a | 3149 | |
09f800b9 | 3150 | /* Constants always come the second operand. Prefer "nice" constants. */ |
b147a3b4 | 3151 | if (code == CONST_INT) |
1f3b83af | 3152 | return -8; |
e913b5cd | 3153 | if (code == CONST_WIDE_INT) |
3154 | return -8; | |
b147a3b4 | 3155 | if (code == CONST_DOUBLE) |
1f3b83af | 3156 | return -7; |
e397ad8e | 3157 | if (code == CONST_FIXED) |
3158 | return -7; | |
efbc8128 | 3159 | op = avoid_constant_pool_reference (op); |
57260f80 | 3160 | code = GET_CODE (op); |
6720e96c | 3161 | |
3162 | switch (GET_RTX_CLASS (code)) | |
3163 | { | |
3164 | case RTX_CONST_OBJ: | |
3165 | if (code == CONST_INT) | |
1f3b83af | 3166 | return -6; |
e913b5cd | 3167 | if (code == CONST_WIDE_INT) |
3168 | return -6; | |
6720e96c | 3169 | if (code == CONST_DOUBLE) |
1f3b83af | 3170 | return -5; |
e397ad8e | 3171 | if (code == CONST_FIXED) |
3172 | return -5; | |
1f3b83af | 3173 | return -4; |
6720e96c | 3174 | |
3175 | case RTX_EXTRA: | |
3176 | /* SUBREGs of objects should come second. */ | |
3177 | if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) | |
1f3b83af | 3178 | return -3; |
3072d30e | 3179 | return 0; |
6720e96c | 3180 | |
3181 | case RTX_OBJ: | |
3182 | /* Complex expressions should come first, so decrease the priority
1f3b83af | 3183 | of objects.  Prefer pointer objects over non-pointer objects. */
3184 | if ((REG_P (op) && REG_POINTER (op)) | |
3185 | || (MEM_P (op) && MEM_POINTER (op))) | |
3186 | return -1; | |
3187 | return -2; | |
6720e96c | 3188 | |
3189 | case RTX_COMM_ARITH: | |
3190 | /* Prefer operands that are themselves commutative to be first. | |
3191 | This helps to make things linear. In particular, | |
3192 | (and (and (reg) (reg)) (not (reg))) is canonical. */ | |
3193 | return 4; | |
3194 | ||
3195 | case RTX_BIN_ARITH: | |
3196 | /* If only one operand is a binary expression, it will be the first | |
3197 | operand. In particular, (plus (minus (reg) (reg)) (neg (reg))) | |
3198 | is canonical, although it will usually be further simplified. */ | |
3199 | return 2; | |
48e1416a | 3200 | |
6720e96c | 3201 | case RTX_UNARY: |
3202 | /* Then prefer NEG and NOT. */ | |
3203 | if (code == NEG || code == NOT) | |
3204 | return 1; | |
09f800b9 | 3205 | |
6720e96c | 3206 | default: |
3207 | return 0; | |
3208 | } | |
09f800b9 | 3209 | } |
3210 | ||
dd5b4b36 | 3211 | /* Return 1 iff it is necessary to swap the operands of a commutative
09f800b9 | 3212 | operation in order to canonicalize the expression. */
3213 | ||
1f3b83af | 3214 | bool |
3ad4992f | 3215 | swap_commutative_operands_p (rtx x, rtx y) |
09f800b9 | 3216 | { |
f4ad60b7 | 3217 | return (commutative_operand_precedence (x) |
3218 | < commutative_operand_precedence (y)); | |
09f800b9 | 3219 | } |
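As a concrete illustration of the precedence ordering above, here is a minimal standalone sketch (not part of rtlanal.c; the enum and toy_* names are illustrative only) that mirrors a few of the documented precedence values and shows the resulting swap decisions.

/* Standalone sketch: mirrors the precedence values used by
   commutative_operand_precedence, so (plus (const_int 4) (reg)) is
   swapped to put the register first.  Values are copied from the
   cases above; the enum itself is purely illustrative.  */
#include <stdio.h>

enum toy_kind { TOY_CONST_INT, TOY_REG, TOY_NEG, TOY_BIN_ARITH };

static int toy_precedence (enum toy_kind k)
{
  switch (k)
    {
    case TOY_CONST_INT: return -8;  /* constants prefer to be second */
    case TOY_REG:       return -2;  /* plain (non-pointer) object */
    case TOY_NEG:       return 1;   /* NEG / NOT */
    case TOY_BIN_ARITH: return 2;   /* e.g. (minus (reg) (reg)) */
    }
  return 0;
}

/* Same test as swap_commutative_operands_p: swap when the first
   operand has the lower precedence.  */
static int toy_swap_p (enum toy_kind x, enum toy_kind y)
{
  return toy_precedence (x) < toy_precedence (y);
}

int main (void)
{
  /* (plus (const_int 4) (reg)) -> swap, giving (plus (reg) (const_int 4)).  */
  printf ("const_int vs reg: swap = %d\n", toy_swap_p (TOY_CONST_INT, TOY_REG));
  /* (plus (minus (reg) (reg)) (neg (reg))) is already canonical.  */
  printf ("bin_arith vs neg: swap = %d\n", toy_swap_p (TOY_BIN_ARITH, TOY_NEG));
  return 0;
}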
c4f0a530 | 3220 | |
3221 | /* Return 1 if X is an autoincrement side effect and the register is | |
3222 | not the stack pointer. */ | |
3223 | int | |
dd9b9fc5 | 3224 | auto_inc_p (const_rtx x) |
c4f0a530 | 3225 | { |
3226 | switch (GET_CODE (x)) | |
3227 | { | |
3228 | case PRE_INC: | |
3229 | case POST_INC: | |
3230 | case PRE_DEC: | |
3231 | case POST_DEC: | |
3232 | case PRE_MODIFY: | |
3233 | case POST_MODIFY: | |
3234 | /* There are no REG_INC notes for SP. */ | |
3235 | if (XEXP (x, 0) != stack_pointer_rtx) | |
3236 | return 1; | |
3237 | default: | |
3238 | break; | |
3239 | } | |
3240 | return 0; | |
3241 | } | |
b067e925 | 3242 | |
2358393e | 3243 | /* Return nonzero if IN contains a piece of rtl that has the address LOC. */ |
2c663070 | 3244 | int |
dd9b9fc5 | 3245 | loc_mentioned_in_p (rtx *loc, const_rtx in) |
2c663070 | 3246 | { |
42a3a38b | 3247 | enum rtx_code code; |
3248 | const char *fmt; | |
2c663070 | 3249 | int i, j; |
3250 | ||
42a3a38b | 3251 | if (!in) |
3252 | return 0; | |
3253 | ||
3254 | code = GET_CODE (in); | |
3255 | fmt = GET_RTX_FORMAT (code); | |
2c663070 | 3256 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
3257 | { | |
2c663070 | 3258 | if (fmt[i] == 'e') |
3259 | { | |
c8707f08 | 3260 | if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i))) |
2c663070 | 3261 | return 1; |
3262 | } | |
3263 | else if (fmt[i] == 'E') | |
3264 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) | |
c8707f08 | 3265 | if (loc == &XVECEXP (in, i, j) |
3266 | || loc_mentioned_in_p (loc, XVECEXP (in, i, j))) | |
2c663070 | 3267 | return 1; |
3268 | } | |
3269 | return 0; | |
3270 | } | |
701e46d0 | 3271 | |
f36eb1e9 | 3272 | /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, |
3273 | and SUBREG_BYTE, return the bit offset where the subreg begins | |
3274 | (counting from the least significant bit of the operand). */ | |
ef4e6755 | 3275 | |
3276 | unsigned int | |
3754d046 | 3277 | subreg_lsb_1 (machine_mode outer_mode, |
3278 | machine_mode inner_mode, | |
f36eb1e9 | 3279 | unsigned int subreg_byte) |
ef4e6755 | 3280 | { |
ef4e6755 | 3281 | unsigned int bitpos; |
3282 | unsigned int byte; | |
3283 | unsigned int word; | |
3284 | ||
3285 | /* A paradoxical subreg begins at bit position 0. */ | |
ded805e6 | 3286 | if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode)) |
ef4e6755 | 3287 | return 0; |
3288 | ||
3289 | if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) | |
3290 | /* If the subreg crosses a word boundary ensure that | |
3291 | it also begins and ends on a word boundary. */ | |
04e579b6 | 3292 | gcc_assert (!((subreg_byte % UNITS_PER_WORD |
3293 | + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD | |
3294 | && (subreg_byte % UNITS_PER_WORD | |
3295 | || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); | |
ef4e6755 | 3296 | |
3297 | if (WORDS_BIG_ENDIAN) | |
3298 | word = (GET_MODE_SIZE (inner_mode) | |
f36eb1e9 | 3299 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD; |
ef4e6755 | 3300 | else |
f36eb1e9 | 3301 | word = subreg_byte / UNITS_PER_WORD; |
ef4e6755 | 3302 | bitpos = word * BITS_PER_WORD; |
3303 | ||
3304 | if (BYTES_BIG_ENDIAN) | |
3305 | byte = (GET_MODE_SIZE (inner_mode) | |
f36eb1e9 | 3306 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; |
ef4e6755 | 3307 | else |
f36eb1e9 | 3308 | byte = subreg_byte % UNITS_PER_WORD; |
ef4e6755 | 3309 | bitpos += byte * BITS_PER_UNIT; |
3310 | ||
3311 | return bitpos; | |
3312 | } | |
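The bit-offset arithmetic above can be checked by hand for one concrete case. The sketch below is standalone C (not part of rtlanal.c) and assumes 4-byte words with 8-bit units, roughly a 32-bit target: a 4-byte subreg of an 8-byte value at byte offset 4 starts at bit 32 on a little-endian target and at bit 0 with big-endian words and bytes.

/* Standalone sketch of the non-paradoxical part of subreg_lsb_1.  */
#include <stdio.h>

#define UNITS_PER_WORD 4   /* assumed 32-bit target */
#define BITS_PER_WORD  32
#define BITS_PER_UNIT  8

static unsigned int
toy_subreg_lsb (unsigned int inner_size, unsigned int outer_size,
                unsigned int subreg_byte, int words_big_endian,
                int bytes_big_endian)
{
  unsigned int word, byte;

  if (words_big_endian)
    word = (inner_size - (subreg_byte + outer_size)) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;

  if (bytes_big_endian)
    byte = (inner_size - (subreg_byte + outer_size)) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;

  return word * BITS_PER_WORD + byte * BITS_PER_UNIT;
}

int main (void)
{
  printf ("little-endian: %u\n", toy_subreg_lsb (8, 4, 4, 0, 0)); /* 32 */
  printf ("big-endian:    %u\n", toy_subreg_lsb (8, 4, 4, 1, 1)); /* 0 */
  return 0;
}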
3313 | ||
f36eb1e9 | 3314 | /* Given a subreg X, return the bit offset where the subreg begins |
3315 | (counting from the least significant bit of the reg). */ | |
3316 | ||
3317 | unsigned int | |
dd9b9fc5 | 3318 | subreg_lsb (const_rtx x) |
f36eb1e9 | 3319 | { |
3320 | return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
3321 | SUBREG_BYTE (x)); | |
3322 | } | |
3323 | ||
fe2ebfc8 | 3324 | /* Fill in information about a subreg of a hard register. |
701e46d0 | 3325 | xregno - A regno of an inner hard subreg_reg (or what will become one). |
3326 | xmode - The mode of xregno. | |
3327 | offset - The byte offset. | |
3328 | ymode - The mode of a top level SUBREG (or what may become one). | |
5ccab9d4 | 3329 | info - Pointer to structure to fill in. |
3330 | ||
3331 | Rather than considering one particular inner register (and thus one | |
3332 | particular "outer" register) in isolation, this function really uses | |
3333 | XREGNO as a model for a sequence of isomorphic hard registers. Thus the | |
3334 | function does not check whether adding INFO->offset to XREGNO gives | |
3335 | a valid hard register; even if INFO->offset + XREGNO is out of range, | |
3336 | there might be another register of the same type that is in range. | |
3337 | Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new | |
3338 | register, since that can depend on things like whether the final | |
3339 | register number is even or odd. Callers that want to check whether | |
3340 | this particular subreg can be replaced by a simple (reg ...) should | |
3341 | use simplify_subreg_regno. */ | |
3342 | ||
9680c846 | 3343 | void |
3754d046 | 3344 | subreg_get_info (unsigned int xregno, machine_mode xmode, |
3345 | unsigned int offset, machine_mode ymode, | |
fe2ebfc8 | 3346 | struct subreg_info *info) |
d9b3752c | 3347 | { |
695595bc | 3348 | int nregs_xmode, nregs_ymode; |
d9b3752c | 3349 | int mode_multiple, nregs_multiple; |
fe2ebfc8 | 3350 | int offset_adj, y_offset, y_offset_adj; |
695595bc | 3351 | int regsize_xmode, regsize_ymode; |
fe2ebfc8 | 3352 | bool rknown; |
d9b3752c | 3353 | |
04e579b6 | 3354 | gcc_assert (xregno < FIRST_PSEUDO_REGISTER); |
d9b3752c | 3355 | |
fe2ebfc8 | 3356 | rknown = false; |
3357 | ||
ed21e7ff | 3358 | /* If there are holes in a non-scalar mode in registers, we expect |
3359 | that it is made up of its units concatenated together. */ | |
695595bc | 3360 | if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) |
ed21e7ff | 3361 | { |
3754d046 | 3362 | machine_mode xmode_unit; |
695595bc | 3363 | |
3364 | nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); | |
3365 | if (GET_MODE_INNER (xmode) == VOIDmode) | |
3366 | xmode_unit = xmode; | |
3367 | else | |
3368 | xmode_unit = GET_MODE_INNER (xmode); | |
3369 | gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); | |
3370 | gcc_assert (nregs_xmode | |
3371 | == (GET_MODE_NUNITS (xmode) | |
3372 | * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); | |
3373 | gcc_assert (hard_regno_nregs[xregno][xmode] | |
3374 | == (hard_regno_nregs[xregno][xmode_unit] | |
3375 | * GET_MODE_NUNITS (xmode))); | |
ed21e7ff | 3376 | |
3377 | /* You can only ask for a SUBREG of a value with holes in the middle | |
3378 | if you don't cross the holes. (Such a SUBREG should be done by | |
3379 | picking a different register class, or doing it in memory if | |
3380 | necessary.) An example of a value with holes is XCmode on 32-bit | |
3381 | x86 with -m128bit-long-double; it's represented in 6 32-bit registers, | |
48e1416a | 3382 | 3 for each part, but in memory it's two 128-bit parts. |
ed21e7ff | 3383 | Padding is assumed to be at the end (not necessarily the 'high part') |
3384 | of each unit. */ | |
48e1416a | 3385 | if ((offset / GET_MODE_SIZE (xmode_unit) + 1 |
695595bc | 3386 | < GET_MODE_NUNITS (xmode)) |
3387 | && (offset / GET_MODE_SIZE (xmode_unit) | |
ed21e7ff | 3388 | != ((offset + GET_MODE_SIZE (ymode) - 1) |
695595bc | 3389 | / GET_MODE_SIZE (xmode_unit)))) |
fe2ebfc8 | 3390 | { |
3391 | info->representable_p = false; | |
3392 | rknown = true; | |
3393 | } | |
ed21e7ff | 3394 | } |
3395 | else | |
3396 | nregs_xmode = hard_regno_nregs[xregno][xmode]; | |
48e1416a | 3397 | |
67d6c12b | 3398 | nregs_ymode = hard_regno_nregs[xregno][ymode]; |
d9b3752c | 3399 | |
ed21e7ff | 3400 | /* Paradoxical subregs are otherwise valid. */ |
fe2ebfc8 | 3401 | if (!rknown |
3402 | && offset == 0 | |
ded805e6 | 3403 | && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode)) |
fe2ebfc8 | 3404 | { |
3405 | info->representable_p = true; | |
3406 | /* If this is a big endian paradoxical subreg, which uses more | |
3407 | actual hard registers than the original register, we must | |
3408 | return a negative offset so that we find the proper highpart | |
3409 | of the register. */ | |
3410 | if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD | |
76c64076 | 3411 | ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
fe2ebfc8 | 3412 | info->offset = nregs_xmode - nregs_ymode; |
3413 | else | |
3414 | info->offset = 0; | |
3415 | info->nregs = nregs_ymode; | |
3416 | return; | |
3417 | } | |
d9b3752c | 3418 | |
695595bc | 3419 | /* If registers store different numbers of bits in the different |
3420 | modes, we cannot generally form this subreg. */ | |
fe2ebfc8 | 3421 | if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) |
a100ece7 | 3422 | && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) |
3423 | && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0 | |
3424 | && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0) | |
fe2ebfc8 | 3425 | { |
3426 | regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode; | |
fe2ebfc8 | 3427 | regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode; |
fe2ebfc8 | 3428 | if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1) |
3429 | { | |
3430 | info->representable_p = false; | |
3431 | info->nregs | |
3432 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3433 | info->offset = offset / regsize_xmode; | |
3434 | return; | |
3435 | } | |
3436 | if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1) | |
3437 | { | |
3438 | info->representable_p = false; | |
3439 | info->nregs | |
3440 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3441 | info->offset = offset / regsize_xmode; | |
3442 | return; | |
3443 | } | |
ef8a7a52 | 3444 | /* Quick exit for the simple and common case of extracting whole |
3445 | subregisters from a multiregister value. */ | |
3446 | /* ??? It would be better to integrate this into the code below, | |
3447 | if we can generalize the concept enough and figure out how | |
3448 | odd-sized modes can coexist with the other weird cases we support. */ | |
3449 | if (!rknown | |
3450 | && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN | |
3451 | && regsize_xmode == regsize_ymode | |
3452 | && (offset % regsize_ymode) == 0) | |
3453 | { | |
3454 | info->representable_p = true; | |
3455 | info->nregs = nregs_ymode; | |
3456 | info->offset = offset / regsize_ymode; | |
3457 | gcc_assert (info->offset + info->nregs <= nregs_xmode); | |
3458 | return; | |
3459 | } | |
fe2ebfc8 | 3460 | } |
695595bc | 3461 | |
ed21e7ff | 3462 | /* Lowpart subregs are otherwise valid. */ |
fe2ebfc8 | 3463 | if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) |
3464 | { | |
3465 | info->representable_p = true; | |
3466 | rknown = true; | |
8ef3d190 | 3467 | |
3468 | if (offset == 0 || nregs_xmode == nregs_ymode) | |
3469 | { | |
3470 | info->offset = 0; | |
3471 | info->nregs = nregs_ymode; | |
3472 | return; | |
3473 | } | |
fe2ebfc8 | 3474 | } |
d9b3752c | 3475 | |
ed21e7ff | 3476 | /* This should always pass, otherwise we don't know how to verify |
3477 | the constraint. These conditions may be relaxed but | |
3478 | subreg_regno_offset would need to be redesigned. */ | |
04e579b6 | 3479 | gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); |
04e579b6 | 3480 | gcc_assert ((nregs_xmode % nregs_ymode) == 0); |
d9b3752c | 3481 | |
76c64076 | 3482 | if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN |
3483 | && GET_MODE_SIZE (xmode) > UNITS_PER_WORD) | |
3484 | { | |
3485 | HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode); | |
3486 | HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode); | |
3487 | HOST_WIDE_INT off_low = offset & (ysize - 1); | |
3488 | HOST_WIDE_INT off_high = offset & ~(ysize - 1); | |
3489 | offset = (xsize - ysize - off_high) | off_low; | |
3490 | } | |
df07c3ae | 3491 | /* The XMODE value can be seen as a vector of NREGS_XMODE
845bebef | 3492 | values.  The subreg must represent a lowpart of a given field.
d9b3752c | 3493 | Compute which field it is. */
fe2ebfc8 | 3494 | offset_adj = offset; |
3495 | offset_adj -= subreg_lowpart_offset (ymode, | |
3496 | mode_for_size (GET_MODE_BITSIZE (xmode) | |
3497 | / nregs_xmode, | |
3498 | MODE_INT, 0)); | |
d9b3752c | 3499 | |
ed21e7ff | 3500 | /* Size of ymode must not be greater than the size of xmode. */ |
d9b3752c | 3501 | mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); |
04e579b6 | 3502 | gcc_assert (mode_multiple != 0); |
d9b3752c | 3503 | |
3504 | y_offset = offset / GET_MODE_SIZE (ymode); | |
fe2ebfc8 | 3505 | y_offset_adj = offset_adj / GET_MODE_SIZE (ymode); |
3506 | nregs_multiple = nregs_xmode / nregs_ymode; | |
04e579b6 | 3507 | |
fe2ebfc8 | 3508 | gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0); |
04e579b6 | 3509 | gcc_assert ((mode_multiple % nregs_multiple) == 0); |
3510 | ||
fe2ebfc8 | 3511 | if (!rknown) |
3512 | { | |
3513 | info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple))); | |
3514 | rknown = true; | |
3515 | } | |
3516 | info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode; | |
3517 | info->nregs = nregs_ymode; | |
3518 | } | |
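For the common whole-register case handled by the quick exit above, the result reduces to a simple division of the byte offset by the per-register size. A minimal standalone sketch follows (illustrative sizes, not taken from any real target):

/* Standalone sketch of the "quick exit" case in subreg_get_info: an
   8-byte value held in two 4-byte hard registers, with a 4-byte subreg
   at byte offset 4, maps to register offset 1 and occupies 1 register.  */
#include <assert.h>
#include <stdio.h>

int main (void)
{
  int xmode_size = 8, nregs_xmode = 2;   /* inner value: 2 hard regs */
  int ymode_size = 4, nregs_ymode = 1;   /* subreg mode: 1 hard reg  */
  int offset = 4;                        /* SUBREG_BYTE              */

  int regsize_xmode = xmode_size / nregs_xmode;  /* 4 bytes per reg */
  int regsize_ymode = ymode_size / nregs_ymode;  /* 4 bytes per reg */

  /* Conditions of the quick-exit branch above.  */
  assert (regsize_xmode == regsize_ymode && offset % regsize_ymode == 0);

  printf ("offset = %d regs, nregs = %d\n",
          offset / regsize_ymode, nregs_ymode);  /* offset = 1, nregs = 1 */
  return 0;
}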
3519 | ||
3520 | /* This function returns the regno offset of a subreg expression. | |
3521 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3522 | xmode - The mode of xregno. | |
3523 | offset - The byte offset. | |
3524 | ymode - The mode of a top level SUBREG (or what may become one). | |
3525 | RETURN - The regno offset which would be used. */ | |
3526 | unsigned int | |
3754d046 | 3527 | subreg_regno_offset (unsigned int xregno, machine_mode xmode, |
3528 | unsigned int offset, machine_mode ymode) | |
fe2ebfc8 | 3529 | { |
3530 | struct subreg_info info; | |
3531 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3532 | return info.offset; | |
3533 | } | |
3534 | ||
3535 | /* This function returns true when the offset is representable via | |
3536 | subreg_offset in the given regno. | |
3537 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3538 | xmode - The mode of xregno. | |
3539 | offset - The byte offset. | |
3540 | ymode - The mode of a top level SUBREG (or what may become one). | |
3541 | RETURN - Whether the offset is representable. */ | |
3542 | bool | |
3754d046 | 3543 | subreg_offset_representable_p (unsigned int xregno, machine_mode xmode, |
3544 | unsigned int offset, machine_mode ymode) | |
fe2ebfc8 | 3545 | { |
3546 | struct subreg_info info; | |
3547 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
949bf6a9 | 3548 | return info.representable_p; |
d9b3752c | 3549 | } |
3550 | ||
5992d16a | 3551 | /* Return the number of a YMODE register to which |
3552 | ||
3553 | (subreg:YMODE (reg:XMODE XREGNO) OFFSET) | |
3554 | ||
3555 | can be simplified. Return -1 if the subreg can't be simplified. | |
3556 | ||
3557 | XREGNO is a hard register number. */ | |
3558 | ||
3559 | int | |
3754d046 | 3560 | simplify_subreg_regno (unsigned int xregno, machine_mode xmode, |
3561 | unsigned int offset, machine_mode ymode) | |
5992d16a | 3562 | { |
3563 | struct subreg_info info; | |
3564 | unsigned int yregno; | |
3565 | ||
3566 | #ifdef CANNOT_CHANGE_MODE_CLASS | |
3567 | /* Give the backend a chance to disallow the mode change. */ | |
3568 | if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT | |
3569 | && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT | |
c6a6cdaa | 3570 | && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode) |
3571 | /* We can use mode change in LRA for some transformations. */ | |
3572 | && ! lra_in_progress) | |
5992d16a | 3573 | return -1; |
3574 | #endif | |
3575 | ||
3576 | /* We shouldn't simplify stack-related registers. */ | |
3577 | if ((!reload_completed || frame_pointer_needed) | |
c461d390 | 3578 | && xregno == FRAME_POINTER_REGNUM) |
5992d16a | 3579 | return -1; |
3580 | ||
3581 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
c78ae7d1 | 3582 | && xregno == ARG_POINTER_REGNUM) |
5992d16a | 3583 | return -1; |
3584 | ||
c6a6cdaa | 3585 | if (xregno == STACK_POINTER_REGNUM |
3586 | /* We should convert hard stack register in LRA if it is | |
3587 | possible. */ | |
3588 | && ! lra_in_progress) | |
5992d16a | 3589 | return -1; |
3590 | ||
3591 | /* Try to get the register offset. */ | |
3592 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3593 | if (!info.representable_p) | |
3594 | return -1; | |
3595 | ||
3596 | /* Make sure that the offsetted register value is in range. */ | |
3597 | yregno = xregno + info.offset; | |
3598 | if (!HARD_REGISTER_NUM_P (yregno)) | |
3599 | return -1; | |
3600 | ||
3601 | /* See whether (reg:YMODE YREGNO) is valid. | |
3602 | ||
3603 | ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid. | |
7cb63246 | 3604 | This is a kludge to work around how complex FP arguments are passed |
3605 | on IA-64 and should be fixed. See PR target/49226. */ | |
5992d16a | 3606 | if (!HARD_REGNO_MODE_OK (yregno, ymode) |
3607 | && HARD_REGNO_MODE_OK (xregno, xmode)) | |
3608 | return -1; | |
3609 | ||
3610 | return (int) yregno; | |
3611 | } | |
3612 | ||
aa40f561 | 3613 | /* Return the final regno that a subreg expression refers to. */ |
2617fe26 | 3614 | unsigned int |
dd9b9fc5 | 3615 | subreg_regno (const_rtx x) |
701e46d0 | 3616 | { |
3617 | unsigned int ret; | |
3618 | rtx subreg = SUBREG_REG (x); | |
3619 | int regno = REGNO (subreg); | |
3620 | ||
2617fe26 | 3621 | ret = regno + subreg_regno_offset (regno, |
3622 | GET_MODE (subreg), | |
701e46d0 | 3623 | SUBREG_BYTE (x), |
3624 | GET_MODE (x)); | |
3625 | return ret; | |
3626 | ||
3627 | } | |
fe2ebfc8 | 3628 | |
3629 | /* Return the number of registers that a subreg expression refers | |
3630 | to. */ | |
3631 | unsigned int | |
dd9b9fc5 | 3632 | subreg_nregs (const_rtx x) |
dea7b504 | 3633 | { |
3634 | return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x); | |
3635 | } | |
3636 | ||
3637 | /* Return the number of registers that a subreg expression X, whose
3638 | inner register has number REGNO, refers to.  This is a copy of
3639 | rtlanal.c:subreg_nregs, changed so that the regno can be passed in. */
3640 | ||
3641 | unsigned int | |
3642 | subreg_nregs_with_regno (unsigned int regno, const_rtx x) | |
fe2ebfc8 | 3643 | { |
3644 | struct subreg_info info; | |
3645 | rtx subreg = SUBREG_REG (x); | |
fe2ebfc8 | 3646 | |
3647 | subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x), | |
3648 | &info); | |
3649 | return info.nregs; | |
3650 | } | |
3651 | ||
dea7b504 | 3652 | |
7c2cc97e | 3653 | struct parms_set_data |
3654 | { | |
3655 | int nregs; | |
3656 | HARD_REG_SET regs; | |
3657 | }; | |
3658 | ||
3659 | /* Helper function for noticing stores to parameter registers. */ | |
3660 | static void | |
81a410b1 | 3661 | parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
7c2cc97e | 3662 | { |
f7f3687c | 3663 | struct parms_set_data *const d = (struct parms_set_data *) data; |
7c2cc97e | 3664 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
3665 | && TEST_HARD_REG_BIT (d->regs, REGNO (x))) | |
3666 | { | |
3667 | CLEAR_HARD_REG_BIT (d->regs, REGNO (x)); | |
3668 | d->nregs--; | |
3669 | } | |
3670 | } | |
3671 | ||
2617fe26 | 3672 | /* Look backward for first parameter to be loaded. |
a6971e98 | 3673 | Note that loads of all parameters will not necessarily be |
3674 | found if CSE has eliminated some of them (e.g., an argument | |
3675 | to the outer function is passed down as a parameter). | |
7c2cc97e | 3676 | Do not skip BOUNDARY. */ |
3ccd8550 | 3677 | rtx_insn * |
35f8d1c2 | 3678 | find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary) |
7c2cc97e | 3679 | { |
3680 | struct parms_set_data parm; | |
35f8d1c2 | 3681 | rtx p; |
3682 | rtx_insn *before, *first_set; | |
7c2cc97e | 3683 | |
3684 | /* Since different machines initialize their parameter registers | |
3685 | in different orders, assume nothing. Collect the set of all | |
3686 | parameter registers. */ | |
3687 | CLEAR_HARD_REG_SET (parm.regs); | |
3688 | parm.nregs = 0; | |
3689 | for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | |
3690 | if (GET_CODE (XEXP (p, 0)) == USE | |
8ad4c111 | 3691 | && REG_P (XEXP (XEXP (p, 0), 0))) |
7c2cc97e | 3692 | { |
04e579b6 | 3693 | gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); |
7c2cc97e | 3694 | |
3695 | /* We only care about registers which can hold function | |
3696 | arguments. */ | |
3697 | if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0)))) | |
3698 | continue; | |
3699 | ||
3700 | SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0))); | |
3701 | parm.nregs++; | |
3702 | } | |
3703 | before = call_insn; | |
a6971e98 | 3704 | first_set = call_insn; |
7c2cc97e | 3705 | |
3706 | /* Search backward for the first set of a register in this set. */ | |
3707 | while (parm.nregs && before != boundary) | |
3708 | { | |
3709 | before = PREV_INSN (before); | |
3710 | ||
3711 | /* It is possible that some loads got CSEed from one call to | |
3712 | another. Stop in that case. */ | |
6d7dc5b9 | 3713 | if (CALL_P (before)) |
7c2cc97e | 3714 | break; |
3715 | ||
26551efd | 3716 | /* Our caller must either ensure that we will find all sets
7c2cc97e | 3717 | (in case the code has not been optimized yet), or take care
4a82352a | 3718 | of possible labels by setting BOUNDARY to the preceding
7c2cc97e | 3719 | CODE_LABEL. */
6d7dc5b9 | 3720 | if (LABEL_P (before)) |
26551efd | 3721 | { |
04e579b6 | 3722 | gcc_assert (before == boundary); |
26551efd | 3723 | break; |
3724 | } | |
7c2cc97e | 3725 | |
bd10a7cd | 3726 | if (INSN_P (before)) |
a6971e98 | 3727 | { |
3728 | int nregs_old = parm.nregs; | |
3729 | note_stores (PATTERN (before), parms_set, &parm); | |
3730 | /* If we found something that did not set a parameter reg, | |
3731 | we're done. Do not keep going, as that might result | |
3732 | in hoisting an insn before the setting of a pseudo | |
3733 | that is used by the hoisted insn. */ | |
3734 | if (nregs_old != parm.nregs) | |
3735 | first_set = before; | |
3736 | else | |
3737 | break; | |
3738 | } | |
7c2cc97e | 3739 | } |
35f8d1c2 | 3740 | return first_set; |
7c2cc97e | 3741 | } |
fb20d6fa | 3742 | |
de132707 | 3743 | /* Return true if we should avoid inserting code between INSN and preceding |
fb20d6fa | 3744 | call instruction. */ |
3745 | ||
3746 | bool | |
2eb8c261 | 3747 | keep_with_call_p (const rtx_insn *insn) |
fb20d6fa | 3748 | { |
3749 | rtx set; | |
3750 | ||
3751 | if (INSN_P (insn) && (set = single_set (insn)) != NULL) | |
3752 | { | |
8ad4c111 | 3753 | if (REG_P (SET_DEST (set)) |
0c08cb26 | 3754 | && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER |
fb20d6fa | 3755 | && fixed_regs[REGNO (SET_DEST (set))] |
3756 | && general_operand (SET_SRC (set), VOIDmode)) | |
3757 | return true; | |
8ad4c111 | 3758 | if (REG_P (SET_SRC (set)) |
e1ce1485 | 3759 | && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set))) |
8ad4c111 | 3760 | && REG_P (SET_DEST (set)) |
fb20d6fa | 3761 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
3762 | return true; | |
aee989f5 | 3763 | /* There may be a stack pop just after the call and before the store |
3764 | of the return register. Search for the actual store when deciding | |
3765 | if we can break or not. */ | |
fb20d6fa | 3766 | if (SET_DEST (set) == stack_pointer_rtx) |
3767 | { | |
ce4469fa | 3768 | /* This CONST_CAST is okay because next_nonnote_insn just |
5ca94202 | 3769 | returns its argument and we assign it to a const_rtx |
ce4469fa | 3770 | variable. */ |
2eb8c261 | 3771 | const rtx_insn *i2 |
3772 | = next_nonnote_insn (const_cast<rtx_insn *> (insn)); | |
aee989f5 | 3773 | if (i2 && keep_with_call_p (i2)) |
fb20d6fa | 3774 | return true; |
3775 | } | |
3776 | } | |
3777 | return false; | |
3778 | } | |
fa3cb24d | 3779 | |
b9de5542 | 3780 | /* Return true if LABEL is a target of JUMP_INSN. This applies only |
3781 | to non-complex jumps. That is, direct unconditional, conditional, | |
3782 | and tablejumps, but not computed jumps or returns. It also does | |
3783 | not apply to the fallthru case of a conditional jump. */ | |
3784 | ||
3785 | bool | |
28fbb2b5 | 3786 | label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn) |
b9de5542 | 3787 | { |
3788 | rtx tmp = JUMP_LABEL (jump_insn); | |
c86d86ff | 3789 | rtx_jump_table_data *table; |
b9de5542 | 3790 | |
3791 | if (label == tmp) | |
3792 | return true; | |
3793 | ||
c86d86ff | 3794 | if (tablejump_p (jump_insn, NULL, &table)) |
b9de5542 | 3795 | { |
b77639be | 3796 | rtvec vec = table->get_labels (); |
b9de5542 | 3797 | int i, veclen = GET_NUM_ELEM (vec); |
3798 | ||
3799 | for (i = 0; i < veclen; ++i) | |
3800 | if (XEXP (RTVEC_ELT (vec, i), 0) == label) | |
3801 | return true; | |
3802 | } | |
3803 | ||
a8d1dae0 | 3804 | if (find_reg_note (jump_insn, REG_LABEL_TARGET, label)) |
3805 | return true; | |
3806 | ||
b9de5542 | 3807 | return false; |
3808 | } | |
3809 | ||
26619827 | 3810 | \f |
3811 | /* Return an estimate of the cost of computing rtx X. | |
3812 | One use is in cse, to decide which expression to keep in the hash table. | |
3813 | Another is in rtl generation, to pick the cheapest way to multiply. | |
48e1416a | 3814 | Other uses like the latter are expected in the future. |
f529eb25 | 3815 | |
20d892d1 | 3816 | X appears as operand OPNO in an expression with code OUTER_CODE. |
3817 | SPEED specifies whether costs optimized for speed or size should | |
f529eb25 | 3818 | be returned. */ |
26619827 | 3819 | |
3820 | int | |
20d892d1 | 3821 | rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed) |
26619827 | 3822 | { |
3823 | int i, j; | |
3824 | enum rtx_code code; | |
3825 | const char *fmt; | |
3826 | int total; | |
3b3b530a | 3827 | int factor; |
26619827 | 3828 | |
3829 | if (x == 0) | |
3830 | return 0; | |
3831 | ||
3b3b530a | 3832 | /* A size N times larger than UNITS_PER_WORD likely needs N times as |
3833 | many insns, taking N times as long. */ | |
3834 | factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD; | |
3835 | if (factor == 0) | |
3836 | factor = 1; | |
3837 | ||
26619827 | 3838 | /* Compute the default costs of certain things. |
3839 | Note that targetm.rtx_costs can override the defaults. */ | |
3840 | ||
3841 | code = GET_CODE (x); | |
3842 | switch (code) | |
3843 | { | |
3844 | case MULT: | |
3b3b530a | 3845 | /* Multiplication has time-complexity O(N*N), where N is the |
3846 | number of units (translated from digits) when using | |
3847 | schoolbook long multiplication. */ | |
3848 | total = factor * factor * COSTS_N_INSNS (5); | |
26619827 | 3849 | break; |
3850 | case DIV: | |
3851 | case UDIV: | |
3852 | case MOD: | |
3853 | case UMOD: | |
3b3b530a | 3854 | /* Similarly, complexity for schoolbook long division. */ |
3855 | total = factor * factor * COSTS_N_INSNS (7); | |
26619827 | 3856 | break; |
3857 | case USE: | |
67a5e20a | 3858 | /* Used in combine.c as a marker. */ |
26619827 | 3859 | total = 0; |
3860 | break; | |
3b3b530a | 3861 | case SET: |
3862 | /* A SET doesn't have a mode, so let's look at the SET_DEST to get | |
3863 | the mode for the factor. */ | |
3864 | factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD; | |
3865 | if (factor == 0) | |
3866 | factor = 1; | |
3867 | /* Pass through. */ | |
26619827 | 3868 | default: |
3b3b530a | 3869 | total = factor * COSTS_N_INSNS (1); |
26619827 | 3870 | } |
3871 | ||
3872 | switch (code) | |
3873 | { | |
3874 | case REG: | |
3875 | return 0; | |
3876 | ||
3877 | case SUBREG: | |
8eb9bb0e | 3878 | total = 0; |
26619827 | 3879 | /* If we can't tie these modes, make this expensive. The larger |
3880 | the mode, the more expensive it is. */ | |
3881 | if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x)))) | |
3b3b530a | 3882 | return COSTS_N_INSNS (2 + factor); |
26619827 | 3883 | break; |
3884 | ||
3885 | default: | |
20d892d1 | 3886 | if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed)) |
26619827 | 3887 | return total; |
3888 | break; | |
3889 | } | |
3890 | ||
3891 | /* Sum the costs of the sub-rtx's, plus cost of this operation, | |
3892 | which is already in total. */ | |
3893 | ||
3894 | fmt = GET_RTX_FORMAT (code); | |
3895 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3896 | if (fmt[i] == 'e') | |
20d892d1 | 3897 | total += rtx_cost (XEXP (x, i), code, i, speed); |
26619827 | 3898 | else if (fmt[i] == 'E') |
3899 | for (j = 0; j < XVECLEN (x, i); j++) | |
20d892d1 | 3900 | total += rtx_cost (XVECEXP (x, i, j), code, i, speed); |
26619827 | 3901 | |
3902 | return total; | |
3903 | } | |
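The effect of the size factor on the default costs above can be worked through numerically. This standalone sketch (not part of rtlanal.c) assumes a 32-bit target and defines COSTS_N_INSNS locally with the (N) * 4 expansion used by rtl.h.

/* Standalone sketch: default rtx_cost values for a 64-bit operation on
   an assumed 32-bit target, so factor = 2.  */
#include <stdio.h>

#define COSTS_N_INSNS(N) ((N) * 4)   /* same expansion as rtl.h */

int main (void)
{
  int units_per_word = 4;            /* assumed 32-bit target */
  int mode_size = 8;                 /* 64-bit operation      */
  int factor = mode_size / units_per_word;
  if (factor == 0)
    factor = 1;

  int mult_cost = factor * factor * COSTS_N_INSNS (5);   /* 2*2*20 = 80  */
  int div_cost  = factor * factor * COSTS_N_INSNS (7);   /* 2*2*28 = 112 */
  int other     = factor * COSTS_N_INSNS (1);            /* 2*4    = 8   */

  printf ("MULT %d, DIV %d, default %d\n", mult_cost, div_cost, other);
  return 0;
}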
c9a03487 | 3904 | |
3905 | /* Fill in the structure C with information about both speed and size rtx | |
20d892d1 | 3906 | costs for X, which is operand OPNO in an expression with code OUTER. */ |
c9a03487 | 3907 | |
3908 | void | |
20d892d1 | 3909 | get_full_rtx_cost (rtx x, enum rtx_code outer, int opno, |
3910 | struct full_rtx_costs *c) | |
c9a03487 | 3911 | { |
20d892d1 | 3912 | c->speed = rtx_cost (x, outer, opno, true); |
3913 | c->size = rtx_cost (x, outer, opno, false); | |
c9a03487 | 3914 | } |
3915 | ||
26619827 | 3916 | \f |
3917 | /* Return cost of address expression X. | |
48e1416a | 3918 | Expect that X is properly formed address reference. |
f529eb25 | 3919 | |
3920 | SPEED parameter specify whether costs optimized for speed or size should | |
3921 | be returned. */ | |
26619827 | 3922 | |
3923 | int | |
3754d046 | 3924 | address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed) |
26619827 | 3925 | { |
26619827 | 3926 | /* We may be asked for the cost of various unusual addresses, such as the
3927 | operands of a push instruction.  It is not worthwhile to complicate
3928 | the target hook for such cases. */
3929 | ||
bd1a81f7 | 3930 | if (!memory_address_addr_space_p (mode, x, as)) |
26619827 | 3931 | return 1000; |
3932 | ||
d9c5e5f4 | 3933 | return targetm.address_cost (x, mode, as, speed); |
26619827 | 3934 | } |
3935 | ||
3936 | /* If the target doesn't override, compute the cost as with arithmetic. */ | |
3937 | ||
3938 | int | |
3754d046 | 3939 | default_address_cost (rtx x, machine_mode, addr_space_t, bool speed) |
26619827 | 3940 | { |
20d892d1 | 3941 | return rtx_cost (x, MEM, 0, speed); |
26619827 | 3942 | } |
d263732c | 3943 | \f |
3944 | ||
3945 | unsigned HOST_WIDE_INT | |
3754d046 | 3946 | nonzero_bits (const_rtx x, machine_mode mode) |
d263732c | 3947 | { |
3948 | return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0); | |
3949 | } | |
3950 | ||
3951 | unsigned int | |
3754d046 | 3952 | num_sign_bit_copies (const_rtx x, machine_mode mode) |
d263732c | 3953 | { |
3954 | return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0); | |
3955 | } | |
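To make the meaning of these two queries concrete, the standalone example below (plain C, no RTL) works through a value known to fit in the low 4 bits of an 8-bit mode, for which nonzero_bits would report 0x0f and num_sign_bit_copies would report 4 (the top four bits all equal the sign bit, which is known to be zero).

/* Standalone illustration of the relationship between the two queries
   above for a known-nonnegative value in an 8-bit mode.  */
#include <stdio.h>

int main (void)
{
  unsigned int mode_width = 8;
  unsigned int nonzero = 0x0f;          /* value known to be in [0, 15] */

  /* Highest possibly-set bit is bit 3, so the top 8 - 4 = 4 bits are
     known copies of the (zero) sign bit.  */
  unsigned int width = 0;
  for (unsigned int m = nonzero; m; m >>= 1)
    width++;
  unsigned int sign_bit_copies = mode_width - width;

  printf ("nonzero = 0x%02x, sign bit copies = %u\n", nonzero, sign_bit_copies);
  return 0;
}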
3956 | ||
3957 | /* The function cached_nonzero_bits is a wrapper around nonzero_bits1. | |
3958 | It avoids exponential behavior in nonzero_bits1 when X has | |
3959 | identical subexpressions on the first or the second level. */ | |
3960 | ||
3961 | static unsigned HOST_WIDE_INT | |
3754d046 | 3962 | cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x, |
3963 | machine_mode known_mode, | |
d263732c | 3964 | unsigned HOST_WIDE_INT known_ret) |
3965 | { | |
3966 | if (x == known_x && mode == known_mode) | |
3967 | return known_ret; | |
3968 | ||
3969 | /* Try to find identical subexpressions. If found call | |
3970 | nonzero_bits1 on X with the subexpressions as KNOWN_X and the | |
3971 | precomputed value for the subexpression as KNOWN_RET. */ | |
3972 | ||
3973 | if (ARITHMETIC_P (x)) | |
3974 | { | |
3975 | rtx x0 = XEXP (x, 0); | |
3976 | rtx x1 = XEXP (x, 1); | |
3977 | ||
3978 | /* Check the first level. */ | |
3979 | if (x0 == x1) | |
3980 | return nonzero_bits1 (x, mode, x0, mode, | |
3981 | cached_nonzero_bits (x0, mode, known_x, | |
3982 | known_mode, known_ret)); | |
3983 | ||
3984 | /* Check the second level. */ | |
3985 | if (ARITHMETIC_P (x0) | |
3986 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
3987 | return nonzero_bits1 (x, mode, x1, mode, | |
3988 | cached_nonzero_bits (x1, mode, known_x, | |
3989 | known_mode, known_ret)); | |
3990 | ||
3991 | if (ARITHMETIC_P (x1) | |
3992 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
3993 | return nonzero_bits1 (x, mode, x0, mode, | |
3994 | cached_nonzero_bits (x0, mode, known_x, | |
3995 | known_mode, known_ret)); | |
3996 | } | |
3997 | ||
3998 | return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); | |
3999 | } | |
4000 | ||
4001 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. | |
4002 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
4003 | is less useful. We can't allow both, because that results in exponential | |
4004 | run time recursion. There is a nullstone testcase that triggered | |
4005 | this. This macro avoids accidental uses of num_sign_bit_copies. */ | |
4006 | #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior | |
4007 | ||
4008 | /* Given an expression, X, compute which bits in X can be nonzero. | |
4009 | We don't care about bits outside of those defined in MODE. | |
4010 | ||
4011 | For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is | |
4012 | an arithmetic operation, we can do better. */ | |
4013 | ||
4014 | static unsigned HOST_WIDE_INT | |
3754d046 | 4015 | nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x, |
4016 | machine_mode known_mode, | |
d263732c | 4017 | unsigned HOST_WIDE_INT known_ret) |
4018 | { | |
4019 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); | |
4020 | unsigned HOST_WIDE_INT inner_nz; | |
4021 | enum rtx_code code; | |
3754d046 | 4022 | machine_mode inner_mode; |
ded805e6 | 4023 | unsigned int mode_width = GET_MODE_PRECISION (mode); |
d263732c | 4024 | |
6d5136ab | 4025 | /* For floating-point and vector values, assume all bits are needed. */ |
4026 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) | |
4027 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
d263732c | 4028 | return nonzero; |
4029 | ||
4030 | /* If X is wider than MODE, use its mode instead. */ | |
ded805e6 | 4031 | if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width) |
d263732c | 4032 | { |
4033 | mode = GET_MODE (x); | |
4034 | nonzero = GET_MODE_MASK (mode); | |
ded805e6 | 4035 | mode_width = GET_MODE_PRECISION (mode); |
d263732c | 4036 | } |
4037 | ||
4038 | if (mode_width > HOST_BITS_PER_WIDE_INT) | |
4039 | /* Our only callers in this case look for single bit values. So | |
4040 | just return the mode mask. Those tests will then be false. */ | |
4041 | return nonzero; | |
4042 | ||
4043 | #ifndef WORD_REGISTER_OPERATIONS | |
4044 | /* If MODE is wider than X, but both are a single word for both the host | |
4045 | and target machines, we can compute this from which bits of the | |
4046 | object might be nonzero in its own mode, taking into account the fact | |
4047 | that on many CISC machines, accessing an object in a wider mode | |
4048 | causes the high-order bits to become undefined. So they are | |
4049 | not known to be zero. */ | |
4050 | ||
4051 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
ded805e6 | 4052 | && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD |
4053 | && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
4054 | && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x))) | |
d263732c | 4055 | { |
4056 | nonzero &= cached_nonzero_bits (x, GET_MODE (x), | |
4057 | known_x, known_mode, known_ret); | |
4058 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); | |
4059 | return nonzero; | |
4060 | } | |
4061 | #endif | |
4062 | ||
4063 | code = GET_CODE (x); | |
4064 | switch (code) | |
4065 | { | |
4066 | case REG: | |
4067 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4068 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
4069 | all the bits above ptr_mode are known to be zero. */ | |
04ec15fa | 4070 | /* As we do not know which address space the pointer is referring to, |
98155838 | 4071 | we can do this only if the target does not support different pointer |
4072 | or address modes depending on the address space. */ | |
4073 | if (target_default_pointer_address_modes_p () | |
4074 | && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
d263732c | 4075 | && REG_POINTER (x)) |
4076 | nonzero &= GET_MODE_MASK (ptr_mode); | |
4077 | #endif | |
4078 | ||
4079 | /* Include declared information about alignment of pointers. */ | |
4080 | /* ??? We don't properly preserve REG_POINTER changes across | |
4081 | pointer-to-integer casts, so we can't trust it except for | |
4082 | things that we know must be pointers. See execute/960116-1.c. */ | |
4083 | if ((x == stack_pointer_rtx | |
4084 | || x == frame_pointer_rtx | |
4085 | || x == arg_pointer_rtx) | |
4086 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
4087 | { | |
4088 | unsigned HOST_WIDE_INT alignment | |
4089 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
4090 | ||
4091 | #ifdef PUSH_ROUNDING | |
4092 | /* If PUSH_ROUNDING is defined, it is possible for the | |
4093 | stack to be momentarily aligned only to that amount, | |
4094 | so we pick the least alignment. */ | |
4095 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
4096 | alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), | |
4097 | alignment); | |
4098 | #endif | |
4099 | ||
4100 | nonzero &= ~(alignment - 1); | |
4101 | } | |
4102 | ||
4103 | { | |
4104 | unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; | |
47cfb7f4 | 4105 | rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x, |
d263732c | 4106 | known_mode, known_ret, |
4107 | &nonzero_for_hook); | |
4108 | ||
47cfb7f4 | 4109 | if (new_rtx) |
4110 | nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x, | |
d263732c | 4111 | known_mode, known_ret); |
4112 | ||
4113 | return nonzero_for_hook; | |
4114 | } | |
4115 | ||
4116 | case CONST_INT: | |
4117 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
4118 | /* If X is negative in MODE, sign-extend the value. */ | |
9d8859f1 | 4119 | if (INTVAL (x) > 0 |
4120 | && mode_width < BITS_PER_WORD | |
4121 | && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) | |
4122 | != 0) | |
561f0ec8 | 4123 | return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width); |
d263732c | 4124 | #endif |
4125 | ||
9d8859f1 | 4126 | return UINTVAL (x); |
d263732c | 4127 | |
4128 | case MEM: | |
4129 | #ifdef LOAD_EXTEND_OP | |
4130 | /* In many, if not most, RISC machines, reading a byte from memory | |
4131 | zeros the rest of the register. Noticing that fact saves a lot | |
4132 | of extra zero-extends. */ | |
4133 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) | |
4134 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
4135 | #endif | |
4136 | break; | |
4137 | ||
4138 | case EQ: case NE: | |
4139 | case UNEQ: case LTGT: | |
4140 | case GT: case GTU: case UNGT: | |
4141 | case LT: case LTU: case UNLT: | |
4142 | case GE: case GEU: case UNGE: | |
4143 | case LE: case LEU: case UNLE: | |
4144 | case UNORDERED: case ORDERED: | |
d263732c | 4145 | /* If this produces an integer result, we know which bits are set. |
4146 | Code here used to clear bits outside the mode of X, but that is | |
4147 | now done above. */ | |
48e1416a | 4148 | /* Mind that MODE is the mode the caller wants to look at this |
4149 | operation in, and not the actual operation mode. We can wind | |
8850c3db | 4150 | up with (subreg:DI (gt:V4HI x y)), and we don't have anything |
4151 | that describes the results of a vector compare. */ | |
4152 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
d263732c | 4153 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
4154 | nonzero = STORE_FLAG_VALUE; | |
4155 | break; | |
4156 | ||
4157 | case NEG: | |
4158 | #if 0 | |
4159 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4160 | and num_sign_bit_copies. */ | |
4161 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
ded805e6 | 4162 | == GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4163 | nonzero = 1; |
4164 | #endif | |
4165 | ||
b0676cad | 4166 | if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width) |
d263732c | 4167 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); |
4168 | break; | |
4169 | ||
4170 | case ABS: | |
4171 | #if 0 | |
4172 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4173 | and num_sign_bit_copies. */ | |
4174 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
ded805e6 | 4175 | == GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4176 | nonzero = 1; |
4177 | #endif | |
4178 | break; | |
4179 | ||
4180 | case TRUNCATE: | |
4181 | nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode, | |
4182 | known_x, known_mode, known_ret) | |
4183 | & GET_MODE_MASK (mode)); | |
4184 | break; | |
4185 | ||
4186 | case ZERO_EXTEND: | |
4187 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4188 | known_x, known_mode, known_ret); | |
4189 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4190 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
4191 | break; | |
4192 | ||
4193 | case SIGN_EXTEND: | |
4194 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
4195 | Otherwise, show all the bits in the outer mode but not the inner | |
4196 | may be nonzero. */ | |
4197 | inner_nz = cached_nonzero_bits (XEXP (x, 0), mode, | |
4198 | known_x, known_mode, known_ret); | |
4199 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4200 | { | |
4201 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
f92430e0 | 4202 | if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz)) |
d263732c | 4203 | inner_nz |= (GET_MODE_MASK (mode) |
4204 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); | |
4205 | } | |
4206 | ||
4207 | nonzero &= inner_nz; | |
4208 | break; | |
4209 | ||
4210 | case AND: | |
4211 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4212 | known_x, known_mode, known_ret) | |
4213 | & cached_nonzero_bits (XEXP (x, 1), mode, | |
4214 | known_x, known_mode, known_ret); | |
4215 | break; | |
4216 | ||
4217 | case XOR: case IOR: | |
4218 | case UMIN: case UMAX: case SMIN: case SMAX: | |
4219 | { | |
9d8859f1 | 4220 | unsigned HOST_WIDE_INT nonzero0 |
4221 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4222 | known_x, known_mode, known_ret); | |
d263732c | 4223 | |
4224 | /* Don't call nonzero_bits for the second time if it cannot change | |
4225 | anything. */ | |
4226 | if ((nonzero & nonzero0) != nonzero) | |
4227 | nonzero &= nonzero0 | |
4228 | | cached_nonzero_bits (XEXP (x, 1), mode, | |
4229 | known_x, known_mode, known_ret); | |
4230 | } | |
4231 | break; | |
4232 | ||
4233 | case PLUS: case MINUS: | |
4234 | case MULT: | |
4235 | case DIV: case UDIV: | |
4236 | case MOD: case UMOD: | |
4237 | /* We can apply the rules of arithmetic to compute the number of | |
4238 | high- and low-order zero bits of these operations. We start by | |
4239 | computing the width (position of the highest-order nonzero bit) | |
4240 | and the number of low-order zero bits for each value. */ | |
4241 | { | |
9d8859f1 | 4242 | unsigned HOST_WIDE_INT nz0 |
4243 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4244 | known_x, known_mode, known_ret); | |
4245 | unsigned HOST_WIDE_INT nz1 | |
4246 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4247 | known_x, known_mode, known_ret); | |
ded805e6 | 4248 | int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1; |
d263732c | 4249 | int width0 = floor_log2 (nz0) + 1; |
4250 | int width1 = floor_log2 (nz1) + 1; | |
4251 | int low0 = floor_log2 (nz0 & -nz0); | |
4252 | int low1 = floor_log2 (nz1 & -nz1); | |
9d8859f1 | 4253 | unsigned HOST_WIDE_INT op0_maybe_minusp |
4254 | = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
4255 | unsigned HOST_WIDE_INT op1_maybe_minusp | |
4256 | = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
d263732c | 4257 | unsigned int result_width = mode_width; |
4258 | int result_low = 0; | |
4259 | ||
4260 | switch (code) | |
4261 | { | |
4262 | case PLUS: | |
4263 | result_width = MAX (width0, width1) + 1; | |
4264 | result_low = MIN (low0, low1); | |
4265 | break; | |
4266 | case MINUS: | |
4267 | result_low = MIN (low0, low1); | |
4268 | break; | |
4269 | case MULT: | |
4270 | result_width = width0 + width1; | |
4271 | result_low = low0 + low1; | |
4272 | break; | |
4273 | case DIV: | |
4274 | if (width1 == 0) | |
4275 | break; | |
9d8859f1 | 4276 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
d263732c | 4277 | result_width = width0; |
4278 | break; | |
4279 | case UDIV: | |
4280 | if (width1 == 0) | |
4281 | break; | |
4282 | result_width = width0; | |
4283 | break; | |
4284 | case MOD: | |
4285 | if (width1 == 0) | |
4286 | break; | |
9d8859f1 | 4287 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
d263732c | 4288 | result_width = MIN (width0, width1); |
4289 | result_low = MIN (low0, low1); | |
4290 | break; | |
4291 | case UMOD: | |
4292 | if (width1 == 0) | |
4293 | break; | |
4294 | result_width = MIN (width0, width1); | |
4295 | result_low = MIN (low0, low1); | |
4296 | break; | |
4297 | default: | |
04e579b6 | 4298 | gcc_unreachable (); |
d263732c | 4299 | } |
4300 | ||
4301 | if (result_width < mode_width) | |
9d8859f1 | 4302 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1; |
d263732c | 4303 | |
4304 | if (result_low > 0) | |
9d8859f1 | 4305 | nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1); |
d263732c | 4306 | } |
4307 | break; | |
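      /* Worked example (annotation, not in the original source): if
	 nz0 == 0xff and nz1 == 0xf0, then width0 = width1 = 8,
	 low0 = 0 and low1 = 4.  For PLUS, result_width = 9 and
	 result_low = 0, so in a wider mode nonzero is masked down to
	 0x1ff; for MULT, result_width = 16 and result_low = 4,
	 masking nonzero down to 0xfff0.  */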
4308 | ||
4309 | case ZERO_EXTRACT: | |
971ba038 | 4310 | if (CONST_INT_P (XEXP (x, 1)) |
d263732c | 4311 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
9d8859f1 | 4312 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
d263732c | 4313 | break; |
4314 | ||
4315 | case SUBREG: | |
4316 | /* If this is a SUBREG formed for a promoted variable that has | |
4317 | been zero-extended, we know that at least the high-order bits | |
4318 | are zero, though others might be too. */ | |
4319 | ||
e8629f9e | 4320 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x)) |
d263732c | 4321 | nonzero = GET_MODE_MASK (GET_MODE (x)) |
4322 | & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x), | |
4323 | known_x, known_mode, known_ret); | |
4324 | ||
f92430e0 | 4325 | inner_mode = GET_MODE (SUBREG_REG (x)); |
d263732c | 4326 | /* If the inner mode is a single word for both the host and target |
4327 | machines, we can compute this from which bits of the inner | |
4328 | object might be nonzero. */ | |
ded805e6 | 4329 | if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD |
4330 | && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT)) | |
d263732c | 4331 | { |
4332 | nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, | |
4333 | known_x, known_mode, known_ret); | |
4334 | ||
4335 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) | |
4336 | /* If this is a typical RISC machine, we only have to worry | |
4337 | about the way loads are extended. */ | |
f92430e0 | 4338 | if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND |
4339 | ? val_signbit_known_set_p (inner_mode, nonzero) | |
4340 | : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND) | |
e16ceb8e | 4341 | || !MEM_P (SUBREG_REG (x))) |
d263732c | 4342 | #endif |
4343 | { | |
4344 | /* On many CISC machines, accessing an object in a wider mode | |
4345 | causes the high-order bits to become undefined. So they are | |
4346 | not known to be zero. */ | |
ded805e6 | 4347 | if (GET_MODE_PRECISION (GET_MODE (x)) |
4348 | > GET_MODE_PRECISION (inner_mode)) | |
d263732c | 4349 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) |
f92430e0 | 4350 | & ~GET_MODE_MASK (inner_mode)); |
d263732c | 4351 | } |
4352 | } | |
4353 | break; | |
4354 | ||
4355 | case ASHIFTRT: | |
4356 | case LSHIFTRT: | |
4357 | case ASHIFT: | |
4358 | case ROTATE: | |
4359 | /* The nonzero bits are in two classes: any bits within MODE | |
4360 | that aren't in GET_MODE (x) are always significant. The rest of the | |
4361 | nonzero bits are those that are significant in the operand of | |
4362 | the shift when shifted the appropriate number of bits. This | |
4363 | shows that high-order bits are cleared by the right shift and | |
4364 | low-order bits by left shifts. */ | |
971ba038 | 4365 | if (CONST_INT_P (XEXP (x, 1)) |
d263732c | 4366 | && INTVAL (XEXP (x, 1)) >= 0 |
6026d749 | 4367 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
ded805e6 | 4368 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4369 | { |
3754d046 | 4370 | machine_mode inner_mode = GET_MODE (x); |
ded805e6 | 4371 | unsigned int width = GET_MODE_PRECISION (inner_mode); |
d263732c | 4372 | int count = INTVAL (XEXP (x, 1)); |
4373 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); | |
9d8859f1 | 4374 | unsigned HOST_WIDE_INT op_nonzero |
4375 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4376 | known_x, known_mode, known_ret); | |
d263732c | 4377 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; |
4378 | unsigned HOST_WIDE_INT outer = 0; | |
4379 | ||
4380 | if (mode_width > width) | |
4381 | outer = (op_nonzero & nonzero & ~mode_mask); | |
4382 | ||
4383 | if (code == LSHIFTRT) | |
4384 | inner >>= count; | |
4385 | else if (code == ASHIFTRT) | |
4386 | { | |
4387 | inner >>= count; | |
4388 | ||
4389 | /* If the sign bit may have been nonzero before the shift, we | |
4390 | need to mark all the places it could have been copied to | |
4391 | by the shift as possibly nonzero. */ | |
9d8859f1 | 4392 | if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count))) |
4393 | inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1) | |
4394 | << (width - count); | |
d263732c | 4395 | } |
4396 | else if (code == ASHIFT) | |
4397 | inner <<= count; | |
4398 | else | |
4399 | inner = ((inner << (count % width) | |
4400 | | (inner >> (width - (count % width)))) & mode_mask); | |
4401 | ||
4402 | nonzero &= (outer | inner); | |
4403 | } | |
4404 | break; | |
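      /* Worked example (annotation, not in the original source): with
	 op_nonzero == 0xff in an 8-bit inner mode and count == 4,
	 LSHIFTRT leaves nonzero bits 0x0f and ASHIFT leaves 0xf0.
	 For ASHIFTRT, bit 3 of the shifted value (the old sign bit)
	 may be set, so the high four bits are marked possibly nonzero
	 again and the result stays 0xff.  */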
4405 | ||
4406 | case FFS: | |
4407 | case POPCOUNT: | |
4408 | /* This is at most the number of bits in the mode. */ | |
9d8859f1 | 4409 | nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; |
d263732c | 4410 | break; |
4411 | ||
4412 | case CLZ: | |
4413 | /* If CLZ has a known value at zero, then the nonzero bits are | |
4414 | that value, plus the number of bits in the mode minus one. */ | |
4415 | if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
9d8859f1 | 4416 | nonzero |
4417 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
d263732c | 4418 | else |
4419 | nonzero = -1; | |
4420 | break; | |
4421 | ||
4422 | case CTZ: | |
4423 | /* If CTZ has a known value at zero, then the nonzero bits are | |
4424 | that value, plus the number of bits in the mode minus one. */ | |
4425 | if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
9d8859f1 | 4426 | nonzero |
4427 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
d263732c | 4428 | else |
4429 | nonzero = -1; | |
4430 | break; | |
4431 | ||
3b23b4cc | 4432 | case CLRSB: |
4433 | /* This is at most the number of bits in the mode minus 1. */ | |
4434 | nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4435 | break; | |
4436 | ||
d263732c | 4437 | case PARITY: |
4438 | nonzero = 1; | |
4439 | break; | |
4440 | ||
4441 | case IF_THEN_ELSE: | |
4442 | { | |
9d8859f1 | 4443 | unsigned HOST_WIDE_INT nonzero_true |
4444 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4445 | known_x, known_mode, known_ret); | |
d263732c | 4446 | |
4447 | /* Don't call nonzero_bits for the second time if it cannot change | |
4448 | anything. */ | |
4449 | if ((nonzero & nonzero_true) != nonzero) | |
4450 | nonzero &= nonzero_true | |
4451 | | cached_nonzero_bits (XEXP (x, 2), mode, | |
4452 | known_x, known_mode, known_ret); | |
4453 | } | |
4454 | break; | |
4455 | ||
4456 | default: | |
4457 | break; | |
4458 | } | |
4459 | ||
4460 | return nonzero; | |
4461 | } | |
4462 | ||
4463 | /* See the macro definition above. */ | |
4464 | #undef cached_num_sign_bit_copies | |
4465 | ||
4466 | \f | |
4467 | /* The function cached_num_sign_bit_copies is a wrapper around | |
4468 | num_sign_bit_copies1. It avoids exponential behavior in | |
4469 | num_sign_bit_copies1 when X has identical subexpressions on the | |
4470 | first or the second level. */ | |
4471 | ||
4472 | static unsigned int | |
3754d046 | 4473 | cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x, |
4474 | machine_mode known_mode, | |
d263732c | 4475 | unsigned int known_ret) |
4476 | { | |
4477 | if (x == known_x && mode == known_mode) | |
4478 | return known_ret; | |
4479 | ||
4480 | /* Try to find identical subexpressions. If found call | |
4481 | num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and | |
4482 | the precomputed value for the subexpression as KNOWN_RET. */ | |
4483 | ||
4484 | if (ARITHMETIC_P (x)) | |
4485 | { | |
4486 | rtx x0 = XEXP (x, 0); | |
4487 | rtx x1 = XEXP (x, 1); | |
4488 | ||
4489 | /* Check the first level. */ | |
4490 | if (x0 == x1) | |
4491 | return | |
4492 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4493 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4494 | known_mode, | |
4495 | known_ret)); | |
4496 | ||
4497 | /* Check the second level. */ | |
4498 | if (ARITHMETIC_P (x0) | |
4499 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4500 | return | |
4501 | num_sign_bit_copies1 (x, mode, x1, mode, | |
4502 | cached_num_sign_bit_copies (x1, mode, known_x, | |
4503 | known_mode, | |
4504 | known_ret)); | |
4505 | ||
4506 | if (ARITHMETIC_P (x1) | |
4507 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4508 | return | |
4509 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4510 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4511 | known_mode, | |
4512 | known_ret)); | |
4513 | } | |
4514 | ||
4515 | return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); | |
4516 | } | |
4517 | ||
4518 | /* Return the number of bits at the high-order end of X that are known to | |
4519 | be equal to the sign bit. X will be used in mode MODE; if MODE is | |
4520 | VOIDmode, X will be used in its own mode. The returned value will always | |
4521 | be between 1 and the number of bits in MODE. */ | |
4522 | ||
4523 | static unsigned int | |
3754d046 | 4524 | num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x, |
4525 | machine_mode known_mode, | |
d263732c | 4526 | unsigned int known_ret) |
4527 | { | |
4528 | enum rtx_code code = GET_CODE (x); | |
ded805e6 | 4529 | unsigned int bitwidth = GET_MODE_PRECISION (mode); |
d263732c | 4530 | int num0, num1, result; |
4531 | unsigned HOST_WIDE_INT nonzero; | |
4532 | ||
4533 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
4534 | VOIDmode, we don't know anything. Likewise if one of the modes is | |
4535 | floating-point. */ | |
4536 | ||
4537 | if (mode == VOIDmode) | |
4538 | mode = GET_MODE (x); | |
4539 | ||
6d5136ab | 4540 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)) |
4541 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
d263732c | 4542 | return 1; |
4543 | ||
4544 | /* For a smaller object, just ignore the high bits. */ | |
ded805e6 | 4545 | if (bitwidth < GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4546 | { |
4547 | num0 = cached_num_sign_bit_copies (x, GET_MODE (x), | |
4548 | known_x, known_mode, known_ret); | |
4549 | return MAX (1, | |
ded805e6 | 4550 | num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth)); |
d263732c | 4551 | } |
4552 | ||
ded805e6 | 4553 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4554 | { |
4555 | #ifndef WORD_REGISTER_OPERATIONS | |
ded805e6 | 4556 | /* If this machine does not do all register operations on the entire |
4557 | register and MODE is wider than the mode of X, we can say nothing | |
4558 | at all about the high-order bits. */ | |
d263732c | 4559 | return 1; |
4560 | #else | |
4561 | /* Likewise on machines that do, if the mode of the object is smaller | |
4562 | than a word and loads of that size don't sign extend, we can say | |
4563 | nothing about the high order bits. */ | |
ded805e6 | 4564 | if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD |
d263732c | 4565 | #ifdef LOAD_EXTEND_OP |
4566 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
4567 | #endif | |
4568 | ) | |
4569 | return 1; | |
4570 | #endif | |
4571 | } | |
4572 | ||
4573 | switch (code) | |
4574 | { | |
4575 | case REG: | |
4576 | ||
4577 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4578 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
4579 | all the bits above ptr_mode are known to be sign bit copies. */ | |
04ec15fa | 4580 | /* As we do not know which address space the pointer is referring to, |
98155838 | 4581 | we can do this only if the target does not support different pointer |
4582 | or address modes depending on the address space. */ | |
4583 | if (target_default_pointer_address_modes_p () | |
4584 | && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
4585 | && mode == Pmode && REG_POINTER (x)) | |
ded805e6 | 4586 | return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1; |
d263732c | 4587 | #endif |
4588 | ||
4589 | { | |
4590 | unsigned int copies_for_hook = 1, copies = 1; | |
47cfb7f4 | 4591 | rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, |
d263732c | 4592 | known_mode, known_ret, |
4593 | &copies_for_hook); | |
4594 | ||
47cfb7f4 | 4595 | if (new_rtx) |
4596 | copies = cached_num_sign_bit_copies (new_rtx, mode, known_x, | |
d263732c | 4597 | known_mode, known_ret); |
4598 | ||
4599 | if (copies > 1 || copies_for_hook > 1) | |
4600 | return MAX (copies, copies_for_hook); | |
4601 | ||
4602 | /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */ | |
4603 | } | |
4604 | break; | |
4605 | ||
4606 | case MEM: | |
4607 | #ifdef LOAD_EXTEND_OP | |
4608 | /* Some RISC machines sign-extend all loads smaller than a word. */ |
4609 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) | |
4610 | return MAX (1, ((int) bitwidth | |
ded805e6 | 4611 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1)); |
d263732c | 4612 | #endif |
4613 | break; | |
4614 | ||
4615 | case CONST_INT: | |
4616 | /* If the constant is negative, take its 1's complement and remask. | |
4617 | Then see how many zero bits we have. */ | |
9d8859f1 | 4618 | nonzero = UINTVAL (x) & GET_MODE_MASK (mode); |
d263732c | 4619 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
9d8859f1 | 4620 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
d263732c | 4621 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4622 | ||
4623 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4624 | ||
4625 | case SUBREG: | |
4626 | /* If this is a SUBREG for a promoted object that is sign-extended | |
4627 | and we are looking at it in a wider mode, we know that at least the | |
4628 | high-order bits are known to be sign bit copies. */ | |
4629 | ||
e8629f9e | 4630 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x)) |
d263732c | 4631 | { |
4632 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, | |
4633 | known_x, known_mode, known_ret); | |
4634 | return MAX ((int) bitwidth | |
ded805e6 | 4635 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1, |
d263732c | 4636 | num0); |
4637 | } | |
4638 | ||
4639 | /* For a smaller object, just ignore the high bits. */ | |
ded805e6 | 4640 | if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))) |
d263732c | 4641 | { |
4642 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, | |
4643 | known_x, known_mode, known_ret); | |
4644 | return MAX (1, (num0 | |
ded805e6 | 4645 | - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))) |
d263732c | 4646 | - bitwidth))); |
4647 | } | |
4648 | ||
4649 | #ifdef WORD_REGISTER_OPERATIONS | |
4650 | #ifdef LOAD_EXTEND_OP | |
4651 | /* For paradoxical SUBREGs on machines where all register operations | |
4652 | affect the entire register, just look inside. Note that we are | |
4653 | passing MODE to the recursive call, so the number of sign bit copies | |
4654 | will remain relative to that mode, not the inner mode. */ | |
4655 | ||
4656 | /* This works only if loads sign extend. Otherwise, if we get a | |
4657 | reload for the inner part, it may be loaded from the stack, and | |
4658 | then we lose all sign bit copies that existed before the store | |
4659 | to the stack. */ | |
4660 | ||
b537bfdb | 4661 | if (paradoxical_subreg_p (x) |
d263732c | 4662 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND |
e16ceb8e | 4663 | && MEM_P (SUBREG_REG (x))) |
d263732c | 4664 | return cached_num_sign_bit_copies (SUBREG_REG (x), mode, |
4665 | known_x, known_mode, known_ret); | |
4666 | #endif | |
4667 | #endif | |
4668 | break; | |
4669 | ||
4670 | case SIGN_EXTRACT: | |
971ba038 | 4671 | if (CONST_INT_P (XEXP (x, 1))) |
d263732c | 4672 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
4673 | break; | |
4674 | ||
4675 | case SIGN_EXTEND: | |
ded805e6 | 4676 | return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
d263732c | 4677 | + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, |
4678 | known_x, known_mode, known_ret)); | |
4679 | ||
4680 | case TRUNCATE: | |
4681 | /* For a smaller object, just ignore the high bits. */ | |
4682 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4683 | known_x, known_mode, known_ret); | |
ded805e6 | 4684 | return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
d263732c | 4685 | - bitwidth))); |
4686 | ||
4687 | case NOT: | |
4688 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4689 | known_x, known_mode, known_ret); | |
4690 | ||
4691 | case ROTATE: case ROTATERT: | |
4692 | /* If we are rotating left by a number of bits less than the number | |
4693 | of sign bit copies, we can just subtract that amount from the | |
4694 | number. */ | |
971ba038 | 4695 | if (CONST_INT_P (XEXP (x, 1)) |
d263732c | 4696 | && INTVAL (XEXP (x, 1)) >= 0 |
4697 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
4698 | { | |
4699 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4700 | known_x, known_mode, known_ret); | |
4701 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
4702 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); | |
4703 | } | |
4704 | break; | |
4705 | ||
4706 | case NEG: | |
4707 | /* In general, this subtracts one sign bit copy. But if the value | |
4708 | is known to be positive, the number of sign bit copies is the | |
4709 | same as that of the input. Finally, if the input has just one bit | |
4710 | that might be nonzero, all the bits are copies of the sign bit. */ | |
4711 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4712 | known_x, known_mode, known_ret); | |
4713 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4714 | return num0 > 1 ? num0 - 1 : 1; | |
4715 | ||
4716 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4717 | if (nonzero == 1) | |
4718 | return bitwidth; | |
4719 | ||
4720 | if (num0 > 1 | |
9d8859f1 | 4721 | && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
d263732c | 4722 | num0--; |
4723 | ||
4724 | return num0; | |
4725 | ||
4726 | case IOR: case AND: case XOR: | |
4727 | case SMIN: case SMAX: case UMIN: case UMAX: | |
4728 | /* Logical operations will preserve the number of sign-bit copies. | |
4729 | MIN and MAX operations always return one of the operands. */ | |
4730 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4731 | known_x, known_mode, known_ret); | |
4732 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4733 | known_x, known_mode, known_ret); | |
c07054e9 | 4734 | |
4735 | /* If num1 is clearing some of the top bits then regardless of | |
4736 | the other term, we are guaranteed to have at least that many | |
4737 | high-order zero bits. */ | |
4738 | if (code == AND | |
4739 | && num1 > 1 | |
4740 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
971ba038 | 4741 | && CONST_INT_P (XEXP (x, 1)) |
9d8859f1 | 4742 | && (UINTVAL (XEXP (x, 1)) |
4743 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0) | |
c07054e9 | 4744 | return num1; |
4745 | ||
4746 | /* Similarly for IOR when setting high-order bits. */ | |
4747 | if (code == IOR | |
4748 | && num1 > 1 | |
4749 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
971ba038 | 4750 | && CONST_INT_P (XEXP (x, 1)) |
9d8859f1 | 4751 | && (UINTVAL (XEXP (x, 1)) |
4752 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
c07054e9 | 4753 | return num1; |
4754 | ||
d263732c | 4755 | return MIN (num0, num1); |
4756 | ||
4757 | case PLUS: case MINUS: | |
4758 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
4759 | if we are subtracting 1 from a positive number, there will not | |
4760 | be such a carry. Furthermore, if the positive number is known to | |
4761 | be 0 or 1, we know the result is either -1 or 0. */ | |
4762 | ||
4763 | if (code == PLUS && XEXP (x, 1) == constm1_rtx | |
4764 | && bitwidth <= HOST_BITS_PER_WIDE_INT) | |
4765 | { | |
4766 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
9d8859f1 | 4767 | if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) |
d263732c | 4768 | return (nonzero == 1 || nonzero == 0 ? bitwidth |
4769 | : bitwidth - floor_log2 (nonzero) - 1); | |
4770 | } | |
4771 | ||
4772 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4773 | known_x, known_mode, known_ret); | |
4774 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4775 | known_x, known_mode, known_ret); | |
4776 | result = MAX (1, MIN (num0, num1) - 1); | |
4777 | ||
d263732c | 4778 | return result; |
4779 | ||
4780 | case MULT: | |
4781 | /* The number of bits of the product is the sum of the number of | |
4782 | bits of both terms. However, unless one of the terms is known |
4783 | to be positive, we must allow for an additional bit since negating | |
4784 | a negative number can remove one sign bit copy. */ | |
4785 | ||
4786 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4787 | known_x, known_mode, known_ret); | |
4788 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4789 | known_x, known_mode, known_ret); | |
4790 | ||
4791 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
4792 | if (result > 0 | |
4793 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4794 | || (((nonzero_bits (XEXP (x, 0), mode) | |
9d8859f1 | 4795 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
d263732c | 4796 | && ((nonzero_bits (XEXP (x, 1), mode) |
9d8859f1 | 4797 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) |
4798 | != 0)))) | |
d263732c | 4799 | result--; |
4800 | ||
4801 | return MAX (1, result); | |
4802 | ||
4803 | case UDIV: | |
4804 | /* The result must be <= the first operand. If the first operand | |
4805 | has the high bit set, we know nothing about the number of sign | |
4806 | bit copies. */ | |
4807 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4808 | return 1; | |
4809 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
9d8859f1 | 4810 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
d263732c | 4811 | return 1; |
4812 | else | |
4813 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4814 | known_x, known_mode, known_ret); | |
4815 | ||
4816 | case UMOD: | |
3c2a960d | 4817 | /* The result must be <= the second operand. If the second operand |
4818 | has (or just might have) the high bit set, we know nothing about | |
4819 | the number of sign bit copies. */ | |
4820 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4821 | return 1; | |
4822 | else if ((nonzero_bits (XEXP (x, 1), mode) | |
9d8859f1 | 4823 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
3c2a960d | 4824 | return 1; |
4825 | else | |
4826 | return cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
d263732c | 4827 | known_x, known_mode, known_ret); |
4828 | ||
4829 | case DIV: | |
4830 | /* Similar to unsigned division, except that we have to worry about | |
4831 | the case where the divisor is negative, in which case we have | |
4832 | to add 1. */ | |
4833 | result = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4834 | known_x, known_mode, known_ret); | |
4835 | if (result > 1 | |
4836 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4837 | || (nonzero_bits (XEXP (x, 1), mode) | |
9d8859f1 | 4838 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
d263732c | 4839 | result--; |
4840 | ||
4841 | return result; | |
4842 | ||
4843 | case MOD: | |
4844 | result = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4845 | known_x, known_mode, known_ret); | |
4846 | if (result > 1 | |
4847 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4848 | || (nonzero_bits (XEXP (x, 1), mode) | |
9d8859f1 | 4849 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
d263732c | 4850 | result--; |
4851 | ||
4852 | return result; | |
4853 | ||
4854 | case ASHIFTRT: | |
4855 | /* Shifts by a constant add to the number of bits equal to the | |
4856 | sign bit. */ | |
4857 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4858 | known_x, known_mode, known_ret); | |
971ba038 | 4859 | if (CONST_INT_P (XEXP (x, 1)) |
6026d749 | 4860 | && INTVAL (XEXP (x, 1)) > 0 |
ded805e6 | 4861 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4862 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); |
4863 | ||
4864 | return num0; | |
4865 | ||
4866 | case ASHIFT: | |
4867 | /* Left shifts destroy copies. */ | |
971ba038 | 4868 | if (!CONST_INT_P (XEXP (x, 1)) |
d263732c | 4869 | || INTVAL (XEXP (x, 1)) < 0 |
6026d749 | 4870 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth |
ded805e6 | 4871 | || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x))) |
d263732c | 4872 | return 1; |
4873 | ||
4874 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4875 | known_x, known_mode, known_ret); | |
4876 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
4877 | ||
4878 | case IF_THEN_ELSE: | |
4879 | num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4880 | known_x, known_mode, known_ret); | |
4881 | num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode, | |
4882 | known_x, known_mode, known_ret); | |
4883 | return MIN (num0, num1); | |
4884 | ||
4885 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
4886 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: | |
4887 | case GEU: case GTU: case LEU: case LTU: | |
4888 | case UNORDERED: case ORDERED: | |
4889 | /* If the constant is negative, take its 1's complement and remask. | |
4890 | Then see how many zero bits we have. */ | |
4891 | nonzero = STORE_FLAG_VALUE; | |
4892 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
9d8859f1 | 4893 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
d263732c | 4894 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4895 | ||
4896 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4897 | ||
4898 | default: | |
4899 | break; | |
4900 | } | |
4901 | ||
4902 | /* If we haven't been able to figure it out by one of the above rules, | |
4903 | see if some of the high-order bits are known to be zero. If so, | |
4904 | count those bits and return one less than that amount. If we can't |
4905 | safely compute the mask for this mode, just return 1. */ |
4906 | ||
ded805e6 | 4907 | bitwidth = GET_MODE_PRECISION (mode); |
d263732c | 4908 | if (bitwidth > HOST_BITS_PER_WIDE_INT) |
4909 | return 1; | |
4910 | ||
4911 | nonzero = nonzero_bits (x, mode); | |
9d8859f1 | 4912 | return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) |
d263732c | 4913 | ? 1 : bitwidth - floor_log2 (nonzero) - 1; |
4914 | } | |
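/* Worked example of the CONST_INT rule above: in SImode, (const_int -4)
   is 0xfffffffc.  Its sign bit is set, so it is complemented to 0x3;
   floor_log2 (0x3) is 1, giving 32 - 1 - 1 = 30 sign-bit copies
   (bits 31 down to 2 all equal the sign bit).  */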
0a8a047c | 4915 | |
4916 | /* Calculate the rtx_cost of a single instruction. A return value of | |
4917 | zero indicates an instruction pattern without a known cost. */ | |
4918 | ||
4919 | int | |
f529eb25 | 4920 | insn_rtx_cost (rtx pat, bool speed) |
0a8a047c | 4921 | { |
4922 | int i, cost; | |
4923 | rtx set; | |
4924 | ||
4925 | /* Extract the single set rtx from the instruction pattern. | |
4926 | We can't use single_set since we only have the pattern. */ | |
4927 | if (GET_CODE (pat) == SET) | |
4928 | set = pat; | |
4929 | else if (GET_CODE (pat) == PARALLEL) | |
4930 | { | |
4931 | set = NULL_RTX; | |
4932 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
4933 | { | |
4934 | rtx x = XVECEXP (pat, 0, i); | |
4935 | if (GET_CODE (x) == SET) | |
4936 | { | |
4937 | if (set) | |
4938 | return 0; | |
4939 | set = x; | |
4940 | } | |
4941 | } | |
4942 | if (!set) | |
4943 | return 0; | |
4944 | } | |
4945 | else | |
4946 | return 0; | |
4947 | ||
7013e87c | 4948 | cost = set_src_cost (SET_SRC (set), speed); |
0a8a047c | 4949 | return cost > 0 ? cost : COSTS_N_INSNS (1); |
4950 | } | |
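/* Note on the behaviour above: a PARALLEL containing exactly one SET
   (plus CLOBBERs or USEs) is costed via that SET's source, a PARALLEL
   with two or more SETs yields 0 (unknown), and a zero set_src_cost is
   rounded up to COSTS_N_INSNS (1) so known patterns never appear free.  */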
ea92ba80 | 4951 | |
9a416363 | 4952 | /* Return an estimate of the cost of computing SEQ. */ |
4953 | ||
4954 | unsigned | |
4955 | seq_cost (const rtx_insn *seq, bool speed) | |
4956 | { | |
4957 | unsigned cost = 0; | |
4958 | rtx set; | |
4959 | ||
4960 | for (; seq; seq = NEXT_INSN (seq)) | |
4961 | { | |
4962 | set = single_set (seq); | |
4963 | if (set) | |
4964 | cost += set_rtx_cost (set, speed); | |
4965 | else | |
4966 | cost++; | |
4967 | } | |
4968 | ||
4969 | return cost; | |
4970 | } | |
4971 | ||
ea92ba80 | 4972 | /* Given an insn INSN and condition COND, return the condition in a |
4973 | canonical form to simplify testing by callers. Specifically: | |
4974 | ||
4975 | (1) The code will always be a comparison operation (EQ, NE, GT, etc.). | |
4976 | (2) Both operands will be machine operands; (cc0) will have been replaced. | |
4977 | (3) If an operand is a constant, it will be the second operand. | |
4978 | (4) (LE x const) will be replaced with (LT x <const+1>) and similarly | |
4979 | for GE, GEU, and LEU. | |
4980 | ||
4981 | If the condition cannot be understood, or is an inequality floating-point | |
4982 | comparison which needs to be reversed, 0 will be returned. | |
4983 | ||
4984 | If REVERSE is nonzero, then reverse the condition prior to canonicalizing it. |
4985 | ||
4986 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
4987 | insn used in locating the condition was found. If a replacement test | |
4988 | of the condition is desired, it should be placed in front of that | |
4989 | insn and we will be sure that the inputs are still valid. | |
4990 | ||
4991 | If WANT_REG is nonzero, we wish the condition to be relative to that | |
4992 | register, if possible. Therefore, do not canonicalize the condition | |
48e1416a | 4993 | further. If ALLOW_CC_MODE is nonzero, allow the condition returned |
ea92ba80 | 4994 | to be a compare to a CC mode register. |
4995 | ||
4996 | If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST | |
4997 | and at INSN. */ | |
4998 | ||
4999 | rtx | |
2d650f54 | 5000 | canonicalize_condition (rtx_insn *insn, rtx cond, int reverse, |
5001 | rtx_insn **earliest, | |
ea92ba80 | 5002 | rtx want_reg, int allow_cc_mode, int valid_at_insn_p) |
5003 | { | |
5004 | enum rtx_code code; | |
2d650f54 | 5005 | rtx_insn *prev = insn; |
dd9b9fc5 | 5006 | const_rtx set; |
ea92ba80 | 5007 | rtx tem; |
5008 | rtx op0, op1; | |
5009 | int reverse_code = 0; | |
3754d046 | 5010 | machine_mode mode; |
64685a89 | 5011 | basic_block bb = BLOCK_FOR_INSN (insn); |
ea92ba80 | 5012 | |
5013 | code = GET_CODE (cond); | |
5014 | mode = GET_MODE (cond); | |
5015 | op0 = XEXP (cond, 0); | |
5016 | op1 = XEXP (cond, 1); | |
5017 | ||
5018 | if (reverse) | |
5019 | code = reversed_comparison_code (cond, insn); | |
5020 | if (code == UNKNOWN) | |
5021 | return 0; | |
5022 | ||
5023 | if (earliest) | |
5024 | *earliest = insn; | |
5025 | ||
5026 | /* If we are comparing a register with zero, see if the register is set | |
5027 | in the previous insn to a COMPARE or a comparison operation. Perform | |
5028 | the same tests as a function of STORE_FLAG_VALUE as find_comparison_args | |
5029 | in cse.c */ | |
5030 | ||
5031 | while ((GET_RTX_CLASS (code) == RTX_COMPARE | |
5032 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
5033 | && op1 == CONST0_RTX (GET_MODE (op0)) | |
5034 | && op0 != want_reg) | |
5035 | { | |
5036 | /* Set nonzero when we find something of interest. */ | |
5037 | rtx x = 0; | |
5038 | ||
ea92ba80 | 5039 | /* If comparison with cc0, import actual comparison from compare |
5040 | insn. */ | |
5041 | if (op0 == cc0_rtx) | |
5042 | { | |
5043 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
5044 | || !NONJUMP_INSN_P (prev) | |
5045 | || (set = single_set (prev)) == 0 | |
5046 | || SET_DEST (set) != cc0_rtx) | |
5047 | return 0; | |
5048 | ||
5049 | op0 = SET_SRC (set); | |
5050 | op1 = CONST0_RTX (GET_MODE (op0)); | |
5051 | if (earliest) | |
5052 | *earliest = prev; | |
5053 | } | |
ea92ba80 | 5054 | |
5055 | /* If this is a COMPARE, pick up the two things being compared. */ | |
5056 | if (GET_CODE (op0) == COMPARE) | |
5057 | { | |
5058 | op1 = XEXP (op0, 1); | |
5059 | op0 = XEXP (op0, 0); | |
5060 | continue; | |
5061 | } | |
5062 | else if (!REG_P (op0)) | |
5063 | break; | |
5064 | ||
5065 | /* Go back to the previous insn. Stop if it is not an INSN. We also | |
5066 | stop if it isn't a single set or if it has a REG_INC note because | |
5067 | we don't want to bother dealing with it. */ | |
5068 | ||
5b8537a8 | 5069 | prev = prev_nonnote_nondebug_insn (prev); |
9845d120 | 5070 | |
5071 | if (prev == 0 | |
ea92ba80 | 5072 | || !NONJUMP_INSN_P (prev) |
64685a89 | 5073 | || FIND_REG_INC_NOTE (prev, NULL_RTX) |
5074 | /* In cfglayout mode, there do not have to be labels at the | |
5075 | beginning of a block, or jumps at the end, so the previous | |
5076 | conditions would not stop us when we reach bb boundary. */ | |
5077 | || BLOCK_FOR_INSN (prev) != bb) | |
ea92ba80 | 5078 | break; |
5079 | ||
5080 | set = set_of (op0, prev); | |
5081 | ||
5082 | if (set | |
5083 | && (GET_CODE (set) != SET | |
5084 | || !rtx_equal_p (SET_DEST (set), op0))) | |
5085 | break; | |
5086 | ||
5087 | /* If this is setting OP0, get what it sets it to if it looks | |
5088 | relevant. */ | |
5089 | if (set) | |
5090 | { | |
3754d046 | 5091 | machine_mode inner_mode = GET_MODE (SET_DEST (set)); |
ea92ba80 | 5092 | #ifdef FLOAT_STORE_FLAG_VALUE |
5093 | REAL_VALUE_TYPE fsfv; | |
5094 | #endif | |
5095 | ||
5096 | /* ??? We may not combine comparisons done in a CCmode with | |
5097 | comparisons not done in a CCmode. This is to aid targets | |
5098 | like Alpha that have an IEEE compliant EQ instruction, and | |
5099 | a non-IEEE compliant BEQ instruction. The use of CCmode is | |
5100 | actually artificial, simply to prevent the combination, but | |
5101 | should not affect other platforms. | |
5102 | ||
5103 | However, we must allow VOIDmode comparisons to match either | |
5104 | CCmode or non-CCmode comparison, because some ports have | |
5105 | modeless comparisons inside branch patterns. | |
5106 | ||
5107 | ??? This mode check should perhaps look more like the mode check | |
5108 | in simplify_comparison in combine. */ | |
d7f4ca1d | 5109 | if (((GET_MODE_CLASS (mode) == MODE_CC) |
5110 | != (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
5111 | && mode != VOIDmode | |
5112 | && inner_mode != VOIDmode) | |
5113 | break; | |
5114 | if (GET_CODE (SET_SRC (set)) == COMPARE | |
5115 | || (((code == NE | |
5116 | || (code == LT | |
5117 | && val_signbit_known_set_p (inner_mode, | |
5118 | STORE_FLAG_VALUE)) | |
ea92ba80 | 5119 | #ifdef FLOAT_STORE_FLAG_VALUE |
d7f4ca1d | 5120 | || (code == LT |
5121 | && SCALAR_FLOAT_MODE_P (inner_mode) | |
5122 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), | |
5123 | REAL_VALUE_NEGATIVE (fsfv))) | |
ea92ba80 | 5124 | #endif |
d7f4ca1d | 5125 | )) |
5126 | && COMPARISON_P (SET_SRC (set)))) | |
ea92ba80 | 5127 | x = SET_SRC (set); |
5128 | else if (((code == EQ | |
5129 | || (code == GE | |
f92430e0 | 5130 | && val_signbit_known_set_p (inner_mode, |
5131 | STORE_FLAG_VALUE)) | |
ea92ba80 | 5132 | #ifdef FLOAT_STORE_FLAG_VALUE |
5133 | || (code == GE | |
cee7491d | 5134 | && SCALAR_FLOAT_MODE_P (inner_mode) |
ea92ba80 | 5135 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
5136 | REAL_VALUE_NEGATIVE (fsfv))) | |
5137 | #endif | |
5138 | )) | |
d7f4ca1d | 5139 | && COMPARISON_P (SET_SRC (set))) |
ea92ba80 | 5140 | { |
5141 | reverse_code = 1; | |
5142 | x = SET_SRC (set); | |
5143 | } | |
d7f4ca1d | 5144 | else if ((code == EQ || code == NE) |
5145 | && GET_CODE (SET_SRC (set)) == XOR) | |
5146 | /* Handle sequences like: | |
5147 | ||
5148 | (set op0 (xor X Y)) | |
5149 | ...(eq|ne op0 (const_int 0))... | |
5150 | ||
5151 | in which case: | |
5152 | ||
5153 | (eq op0 (const_int 0)) reduces to (eq X Y) | |
5154 | (ne op0 (const_int 0)) reduces to (ne X Y) | |
5155 | ||
5156 | This is the form used by MIPS16, for example. */ | |
5157 | x = SET_SRC (set); | |
ea92ba80 | 5158 | else |
5159 | break; | |
5160 | } | |
5161 | ||
5162 | else if (reg_set_p (op0, prev)) | |
5163 | /* If this sets OP0, but not directly, we have to give up. */ | |
5164 | break; | |
5165 | ||
5166 | if (x) | |
5167 | { | |
5168 | /* If the caller is expecting the condition to be valid at INSN, | |
5169 | make sure X doesn't change before INSN. */ | |
5170 | if (valid_at_insn_p) | |
5171 | if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) | |
5172 | break; | |
5173 | if (COMPARISON_P (x)) | |
5174 | code = GET_CODE (x); | |
5175 | if (reverse_code) | |
5176 | { | |
5177 | code = reversed_comparison_code (x, prev); | |
5178 | if (code == UNKNOWN) | |
5179 | return 0; | |
5180 | reverse_code = 0; | |
5181 | } | |
5182 | ||
5183 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
5184 | if (earliest) | |
5185 | *earliest = prev; | |
5186 | } | |
5187 | } | |
5188 | ||
5189 | /* If constant is first, put it last. */ | |
5190 | if (CONSTANT_P (op0)) | |
5191 | code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; | |
5192 | ||
5193 | /* If OP0 is the result of a comparison, we weren't able to find what | |
5194 | was really being compared, so fail. */ | |
5195 | if (!allow_cc_mode | |
5196 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) | |
5197 | return 0; | |
5198 | ||
5199 | /* Canonicalize any ordered comparison with integers involving equality | |
5200 | if we can do computations in the relevant mode and we do not | |
5201 | overflow. */ | |
5202 | ||
5203 | if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC | |
971ba038 | 5204 | && CONST_INT_P (op1) |
ea92ba80 | 5205 | && GET_MODE (op0) != VOIDmode |
ded805e6 | 5206 | && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) |
ea92ba80 | 5207 | { |
5208 | HOST_WIDE_INT const_val = INTVAL (op1); | |
5209 | unsigned HOST_WIDE_INT uconst_val = const_val; | |
5210 | unsigned HOST_WIDE_INT max_val | |
5211 | = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); | |
5212 | ||
5213 | switch (code) | |
5214 | { | |
5215 | case LE: | |
5216 | if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) | |
5217 | code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); | |
5218 | break; | |
5219 | ||
5220 | /* When cross-compiling, const_val might be sign-extended from | |
5221 | BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */ | |
5222 | case GE: | |
9d8859f1 | 5223 | if ((const_val & max_val) |
5224 | != ((unsigned HOST_WIDE_INT) 1 | |
ded805e6 | 5225 | << (GET_MODE_PRECISION (GET_MODE (op0)) - 1))) |
ea92ba80 | 5226 | code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); |
5227 | break; | |
5228 | ||
5229 | case LEU: | |
5230 | if (uconst_val < max_val) | |
5231 | code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); | |
5232 | break; | |
5233 | ||
5234 | case GEU: | |
5235 | if (uconst_val != 0) | |
5236 | code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); | |
5237 | break; | |
5238 | ||
5239 | default: | |
5240 | break; | |
5241 | } | |
5242 | } | |
5243 | ||
5244 | /* Never return CC0; return zero instead. */ | |
5245 | if (CC0_P (op0)) | |
5246 | return 0; | |
5247 | ||
5248 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
5249 | } | |
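/* Examples of the canonicalizations above, for an op0 whose scalar
   integer mode is no wider than HOST_WIDE_INT:
       (le  (reg) (const_int 5))  becomes  (lt  (reg) (const_int 6))
       (geu (reg) (const_int 1))  becomes  (gtu (reg) (const_int 0))
   and a constant first operand is swapped into second position,
   e.g. (lt (const_int 5) (reg)) becomes (gt (reg) (const_int 5)).  */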
5250 | ||
5251 | /* Given a jump insn JUMP, return the condition that will cause it to branch | |
5252 | to its JUMP_LABEL. If the condition cannot be understood, or is an | |
5253 | inequality floating-point comparison which needs to be reversed, 0 will | |
5254 | be returned. | |
5255 | ||
5256 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
5257 | insn used in locating the condition was found. If a replacement test | |
5258 | of the condition is desired, it should be placed in front of that | |
5259 | insn and we will be sure that the inputs are still valid. If EARLIEST | |
5260 | is null, the returned condition will be valid at INSN. | |
5261 | ||
5262 | If ALLOW_CC_MODE is nonzero, allow the condition returned to be a | |
5263 | compare CC mode register. | |
5264 | ||
5265 | VALID_AT_INSN_P is the same as for canonicalize_condition. */ | |
5266 | ||
5267 | rtx | |
2d650f54 | 5268 | get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode, |
5269 | int valid_at_insn_p) | |
ea92ba80 | 5270 | { |
5271 | rtx cond; | |
5272 | int reverse; | |
5273 | rtx set; | |
5274 | ||
5275 | /* If this is not a standard conditional jump, we can't parse it. */ | |
5276 | if (!JUMP_P (jump) | |
5277 | || ! any_condjump_p (jump)) | |
5278 | return 0; | |
5279 | set = pc_set (jump); | |
5280 | ||
5281 | cond = XEXP (SET_SRC (set), 0); | |
5282 | ||
5283 | /* If this branches to JUMP_LABEL when the condition is false, reverse | |
5284 | the condition. */ | |
5285 | reverse | |
5286 | = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
b49f2e4b | 5287 | && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump); |
ea92ba80 | 5288 | |
5289 | return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, | |
5290 | allow_cc_mode, valid_at_insn_p); | |
5291 | } | |
5292 | ||
4956440a | 5293 | /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on |
5294 | TARGET_MODE_REP_EXTENDED. | |
5295 | ||
5296 | Note that we assume that the property of | |
5297 | TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes | |
5298 | narrower than mode B. I.e., if A is a mode narrower than B then in | |
5299 | order to be able to operate on it in mode B, mode A needs to | |
5300 | satisfy the requirements set by the representation of mode B. */ | |
5301 | ||
5302 | static void | |
5303 | init_num_sign_bit_copies_in_rep (void) | |
5304 | { | |
3754d046 | 5305 | machine_mode mode, in_mode; |
4956440a | 5306 | |
5307 | for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode; | |
5308 | in_mode = GET_MODE_WIDER_MODE (mode)) | |
5309 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode; | |
5310 | mode = GET_MODE_WIDER_MODE (mode)) | |
5311 | { | |
3754d046 | 5312 | machine_mode i; |
4956440a | 5313 | |
5314 | /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED | |
5315 | extends to the next widest mode. */ | |
5316 | gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN | |
5317 | || GET_MODE_WIDER_MODE (mode) == in_mode); | |
5318 | ||
5319 | /* We are in in_mode. Count how many bits outside of mode | |
5320 | have to be copies of the sign-bit. */ | |
5321 | for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i)) | |
5322 | { | |
3754d046 | 5323 | machine_mode wider = GET_MODE_WIDER_MODE (i); |
4956440a | 5324 | |
5325 | if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND | |
5326 | /* We can only check sign-bit copies starting from the | |
5327 | top-bit. In order to be able to check the bits we | |
5328 | have already seen we pretend that subsequent bits | |
5329 | have to be sign-bit copies too. */ | |
5330 | || num_sign_bit_copies_in_rep [in_mode][mode]) | |
5331 | num_sign_bit_copies_in_rep [in_mode][mode] | |
ded805e6 | 5332 | += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i); |
4956440a | 5333 | } |
5334 | } | |
5335 | } | |
5336 | ||
fd95fba4 | 5337 | /* Suppose that truncation from the machine mode of X to MODE is not a |
5338 | no-op. See if there is anything special about X so that we can | |
5339 | assume it already contains a truncated value of MODE. */ | |
5340 | ||
5341 | bool | |
3754d046 | 5342 | truncated_to_mode (machine_mode mode, const_rtx x) |
fd95fba4 | 5343 | { |
4956440a | 5344 | /* This register has already been used in MODE without explicit |
5345 | truncation. */ | |
5346 | if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x)) | |
5347 | return true; | |
5348 | ||
5349 | /* See if we already satisfy the requirements of MODE. If yes we | |
5350 | can just switch to MODE. */ | |
5351 | if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode] | |
5352 | && (num_sign_bit_copies (x, GET_MODE (x)) | |
5353 | >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1)) | |
5354 | return true; | |
fd95fba4 | 5355 | |
4956440a | 5356 | return false; |
5357 | } | |
a87cf6e5 | 5358 | \f |
69924e56 | 5359 | /* Return true if RTX code CODE has a single sequence of zero or more |
5360 | "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds | |
5361 | entry in that case. */ | |
5362 | ||
5363 | static bool | |
5364 | setup_reg_subrtx_bounds (unsigned int code) | |
5365 | { | |
5366 | const char *format = GET_RTX_FORMAT ((enum rtx_code) code); | |
5367 | unsigned int i = 0; | |
5368 | for (; format[i] != 'e'; ++i) | |
5369 | { | |
5370 | if (!format[i]) | |
5371 | /* No subrtxes. Leave start and count as 0. */ | |
5372 | return true; | |
5373 | if (format[i] == 'E' || format[i] == 'V') | |
5374 | return false; | |
5375 | } | |
5376 | ||
5377 | /* Record the sequence of 'e's. */ | |
5378 | rtx_all_subrtx_bounds[code].start = i; | |
5379 | do | |
5380 | ++i; | |
5381 | while (format[i] == 'e'); | |
5382 | rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start; | |
5383 | /* rtl-iter.h relies on this. */ | |
5384 | gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3); | |
5385 | ||
5386 | for (; format[i]; ++i) | |
5387 | if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e') | |
5388 | return false; | |
5389 | ||
5390 | return true; | |
5391 | } | |
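/* For instance, PLUS has format "ee", so its rtx_all_subrtx_bounds entry
   gets start 0 and count 2; CONST_INT ("w") keeps start and count 0; and
   codes with an rtvec operand such as PARALLEL ("E") make this function
   return false, so init_rtlanal marks them with count UCHAR_MAX.  */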
5392 | ||
e506ea62 | 5393 | /* Initialize rtx_all_subrtx_bounds. */ |
a87cf6e5 | 5394 | void |
5395 | init_rtlanal (void) | |
5396 | { | |
5397 | int i; | |
5398 | for (i = 0; i < NUM_RTX_CODE; i++) | |
5399 | { | |
69924e56 | 5400 | if (!setup_reg_subrtx_bounds (i)) |
5401 | rtx_all_subrtx_bounds[i].count = UCHAR_MAX; | |
5402 | if (GET_RTX_CLASS (i) != RTX_CONST_OBJ) | |
5403 | rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i]; | |
a87cf6e5 | 5404 | } |
4956440a | 5405 | |
5406 | init_num_sign_bit_copies_in_rep (); | |
a87cf6e5 | 5407 | } |
e207fd7a | 5408 | \f |
5409 | /* Check whether this is a constant pool constant. */ | |
5410 | bool | |
5411 | constant_pool_constant_p (rtx x) | |
5412 | { | |
5413 | x = avoid_constant_pool_reference (x); | |
78f1962f | 5414 | return CONST_DOUBLE_P (x); |
e207fd7a | 5415 | } |
d16b48d5 | 5416 | \f |
5417 | /* If M is a bitmask that selects a field of low-order bits within an item but | |
5418 | not the entire word, return the length of the field. Return -1 otherwise. | |
5419 | M is used in machine mode MODE. */ | |
5420 | ||
5421 | int | |
3754d046 | 5422 | low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m) |
d16b48d5 | 5423 | { |
5424 | if (mode != VOIDmode) | |
5425 | { | |
ded805e6 | 5426 | if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT) |
d16b48d5 | 5427 | return -1; |
5428 | m &= GET_MODE_MASK (mode); | |
5429 | } | |
5430 | ||
5431 | return exact_log2 (m + 1); | |
5432 | } | |
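/* Examples: low_bitmask_len (SImode, 0xff) is 8, since 0xff + 1 is an
   exact power of two; low_bitmask_len (SImode, 0x6) is -1, since 0x7 is
   not.  */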
87cf5753 | 5433 | |
5434 | /* Return the mode of MEM's address. */ | |
5435 | ||
3754d046 | 5436 | machine_mode |
87cf5753 | 5437 | get_address_mode (rtx mem) |
5438 | { | |
3754d046 | 5439 | machine_mode mode; |
87cf5753 | 5440 | |
5441 | gcc_assert (MEM_P (mem)); | |
5442 | mode = GET_MODE (XEXP (mem, 0)); | |
5443 | if (mode != VOIDmode) | |
5444 | return mode; | |
5445 | return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); | |
5446 | } | |
21eb4639 | 5447 | \f |
5448 | /* Split up a CONST_DOUBLE or integer constant rtx | |
5449 | into two rtx's for single words, | |
5450 | storing in *FIRST the word that comes first in memory in the target | |
ddb1be65 | 5451 | and in *SECOND the other. |
e913b5cd | 5452 | |
5453 | TODO: This function needs to be rewritten to work on any size | |
5454 | integer. */ | |
21eb4639 | 5455 | |
5456 | void | |
5457 | split_double (rtx value, rtx *first, rtx *second) | |
5458 | { | |
5459 | if (CONST_INT_P (value)) | |
5460 | { | |
5461 | if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD)) | |
5462 | { | |
5463 | /* In this case the CONST_INT holds both target words. | |
5464 | Extract the bits from it into two word-sized pieces. | |
5465 | Sign extend each half to HOST_WIDE_INT. */ | |
5466 | unsigned HOST_WIDE_INT low, high; | |
5467 | unsigned HOST_WIDE_INT mask, sign_bit, sign_extend; | |
5468 | unsigned bits_per_word = BITS_PER_WORD; | |
5469 | ||
5470 | /* Set sign_bit to the most significant bit of a word. */ | |
5471 | sign_bit = 1; | |
5472 | sign_bit <<= bits_per_word - 1; | |
5473 | ||
5474 | /* Set mask so that all bits of the word are set. We could | |
5475 | have used 1 << BITS_PER_WORD instead of basing the | |
5476 | calculation on sign_bit. However, on machines where | |
5477 | HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a | |
5478 | compiler warning, even though the code would never be | |
5479 | executed. */ | |
5480 | mask = sign_bit << 1; | |
5481 | mask--; | |
5482 | ||
5483 | /* Set sign_extend as any remaining bits. */ | |
5484 | sign_extend = ~mask; | |
5485 | ||
5486 | /* Pick the lower word and sign-extend it. */ | |
5487 | low = INTVAL (value); | |
5488 | low &= mask; | |
5489 | if (low & sign_bit) | |
5490 | low |= sign_extend; | |
5491 | ||
5492 | /* Pick the higher word, shifted to the least significant | |
5493 | bits, and sign-extend it. */ | |
5494 | high = INTVAL (value); | |
5495 | high >>= bits_per_word - 1; | |
5496 | high >>= 1; | |
5497 | high &= mask; | |
5498 | if (high & sign_bit) | |
5499 | high |= sign_extend; | |
5500 | ||
5501 | /* Store the words in the target machine order. */ | |
5502 | if (WORDS_BIG_ENDIAN) | |
5503 | { | |
5504 | *first = GEN_INT (high); | |
5505 | *second = GEN_INT (low); | |
5506 | } | |
5507 | else | |
5508 | { | |
5509 | *first = GEN_INT (low); | |
5510 | *second = GEN_INT (high); | |
5511 | } | |
5512 | } | |
5513 | else | |
5514 | { | |
5515 | /* The rule for using CONST_INT for a wider mode | |
5516 | is that we regard the value as signed. | |
5517 | So sign-extend it. */ | |
5518 | rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx); | |
5519 | if (WORDS_BIG_ENDIAN) | |
5520 | { | |
5521 | *first = high; | |
5522 | *second = value; | |
5523 | } | |
5524 | else | |
5525 | { | |
5526 | *first = value; | |
5527 | *second = high; | |
5528 | } | |
5529 | } | |
5530 | } | |
e913b5cd | 5531 | else if (GET_CODE (value) == CONST_WIDE_INT) |
5532 | { | |
5533 | /* All of this is scary code and needs to be converted to | |
5534 | properly work with any size integer. */ | |
5535 | gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2); | |
5536 | if (WORDS_BIG_ENDIAN) | |
5537 | { | |
5538 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5539 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5540 | } | |
5541 | else | |
5542 | { | |
5543 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5544 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5545 | } | |
5546 | } | |
78f1962f | 5547 | else if (!CONST_DOUBLE_P (value)) |
21eb4639 | 5548 | { |
5549 | if (WORDS_BIG_ENDIAN) | |
5550 | { | |
5551 | *first = const0_rtx; | |
5552 | *second = value; | |
5553 | } | |
5554 | else | |
5555 | { | |
5556 | *first = value; | |
5557 | *second = const0_rtx; | |
5558 | } | |
5559 | } | |
5560 | else if (GET_MODE (value) == VOIDmode | |
5561 | /* This is the old way we did CONST_DOUBLE integers. */ | |
5562 | || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT) | |
5563 | { | |
5564 | /* In an integer, the words are defined as most and least significant. | |
5565 | So order them by the target's convention. */ | |
5566 | if (WORDS_BIG_ENDIAN) | |
5567 | { | |
5568 | *first = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5569 | *second = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5570 | } | |
5571 | else | |
5572 | { | |
5573 | *first = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5574 | *second = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5575 | } | |
5576 | } | |
5577 | else | |
5578 | { | |
5579 | REAL_VALUE_TYPE r; | |
5580 | long l[2]; | |
5581 | REAL_VALUE_FROM_CONST_DOUBLE (r, value); | |
5582 | ||
5583 | /* Note, this converts the REAL_VALUE_TYPE to the target's | |
5584 | format, splits up the floating point double and outputs | |
5585 | exactly 32 bits of it into each of l[0] and l[1] -- | |
5586 | not necessarily BITS_PER_WORD bits. */ | |
5587 | REAL_VALUE_TO_TARGET_DOUBLE (r, l); | |
5588 | ||
5589 | /* If 32 bits is an entire word for the target, but not for the host, | |
5590 | then sign-extend on the host so that the number will look the same | |
5591 | way on the host that it would on the target. See for instance | |
5592 | simplify_unary_operation. The #if is needed to avoid compiler | |
5593 | warnings. */ | |
5594 | ||
5595 | #if HOST_BITS_PER_LONG > 32 | |
5596 | if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32) | |
5597 | { | |
5598 | if (l[0] & ((long) 1 << 31)) | |
5599 | l[0] |= ((long) (-1) << 32); | |
5600 | if (l[1] & ((long) 1 << 31)) | |
5601 | l[1] |= ((long) (-1) << 32); | |
5602 | } | |
5603 | #endif | |
5604 | ||
5605 | *first = GEN_INT (l[0]); | |
5606 | *second = GEN_INT (l[1]); | |
5607 | } | |
5608 | } | |
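/* Worked example, assuming 32-bit words, a 64-bit HOST_WIDE_INT and
   !WORDS_BIG_ENDIAN: splitting (const_int 0x123456789abcdef0) stores
   the low word first, sign-extended because its bit 31 is set, so
   *FIRST is (const_int 0xffffffff9abcdef0) and *SECOND is
   (const_int 0x12345678).  */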
5609 | ||
cc1925e7 | 5610 | /* Return true if X is a sign_extract or zero_extract from the least |
5611 | significant bit. */ | |
5612 | ||
5613 | static bool | |
5614 | lsb_bitfield_op_p (rtx x) | |
5615 | { | |
5616 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS) | |
5617 | { | |
3754d046 | 5618 | machine_mode mode = GET_MODE (XEXP (x, 0)); |
041222b1 | 5619 | HOST_WIDE_INT len = INTVAL (XEXP (x, 1)); |
cc1925e7 | 5620 | HOST_WIDE_INT pos = INTVAL (XEXP (x, 2)); |
5621 | ||
5622 | return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0)); | |
5623 | } | |
5624 | return false; | |
5625 | } | |
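/* Example: with !BITS_BIG_ENDIAN,
       (zero_extract:SI (reg:SI r) (const_int 8) (const_int 0))
   extracts from the least significant bit, so this returns true; with
   BITS_BIG_ENDIAN the position would have to be 32 - 8 = 24 instead.  */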
5626 | ||
1efe9e9d | 5627 | /* Strip outer address "mutations" from LOC and return a pointer to the |
5628 | inner value. If OUTER_CODE is nonnull, store the code of the innermost | |
5629 | stripped expression there. | |
5630 | ||
5631 | "Mutations" either convert between modes or apply some kind of | |
cc1925e7 | 5632 | extension, truncation or alignment. */ |
1efe9e9d | 5633 | |
5634 | rtx * | |
5635 | strip_address_mutations (rtx *loc, enum rtx_code *outer_code) | |
5636 | { | |
5637 | for (;;) | |
5638 | { | |
5639 | enum rtx_code code = GET_CODE (*loc); | |
5640 | if (GET_RTX_CLASS (code) == RTX_UNARY) | |
5641 | /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be | |
5642 | used to convert between pointer sizes. */ | |
5643 | loc = &XEXP (*loc, 0); | |
cc1925e7 | 5644 | else if (lsb_bitfield_op_p (*loc)) |
5645 | /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively | |
5646 | acts as a combined truncation and extension. */ | |
5647 | loc = &XEXP (*loc, 0); | |
1efe9e9d | 5648 | else if (code == AND && CONST_INT_P (XEXP (*loc, 1))) |
5649 | /* (and ... (const_int -X)) is used to align to X bytes. */ | |
5650 | loc = &XEXP (*loc, 0); | |
6482f991 | 5651 | else if (code == SUBREG |
5652 | && !OBJECT_P (SUBREG_REG (*loc)) | |
5653 | && subreg_lowpart_p (*loc)) | |
5654 | /* (subreg (operator ...) ...) inside AND is used for mode |
5655 | conversion too. */ | |
3190d74b | 5656 | loc = &SUBREG_REG (*loc); |
1efe9e9d | 5657 | else |
5658 | return loc; | |
5659 | if (outer_code) | |
5660 | *outer_code = code; | |
5661 | } | |
5662 | } | |
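/* Example: given the aligned address
       (and:SI (plus:SI (reg:SI base) (const_int 16)) (const_int -8))
   this returns a pointer to the PLUS and, if OUTER_CODE is nonnull,
   stores AND in it.  */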
5663 | ||
2de52d09 | 5664 | /* Return true if CODE applies some kind of scale. The scaled value is |
5665 | the first operand and the scale is the second. */ |
1efe9e9d | 5666 | |
5667 | static bool | |
2de52d09 | 5668 | binary_scale_code_p (enum rtx_code code) |
1efe9e9d | 5669 | { |
2de52d09 | 5670 | return (code == MULT |
5671 | || code == ASHIFT | |
5672 | /* Needed by ARM targets. */ | |
5673 | || code == ASHIFTRT | |
5674 | || code == LSHIFTRT | |
5675 | || code == ROTATE | |
5676 | || code == ROTATERT); | |
1efe9e9d | 5677 | } |
5678 | ||
2de52d09 | 5679 | /* If *INNER can be interpreted as a base, return a pointer to the inner term |
5680 | (see address_info). Return null otherwise. */ | |
1efe9e9d | 5681 | |
2de52d09 | 5682 | static rtx * |
5683 | get_base_term (rtx *inner) | |
1efe9e9d | 5684 | { |
2de52d09 | 5685 | if (GET_CODE (*inner) == LO_SUM) |
5686 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5687 | if (REG_P (*inner) | |
5688 | || MEM_P (*inner) | |
8f184921 | 5689 | || GET_CODE (*inner) == SUBREG |
5690 | || GET_CODE (*inner) == SCRATCH) | |
2de52d09 | 5691 | return inner; |
5692 | return 0; | |
5693 | } | |
5694 | ||
5695 | /* If *INNER can be interpreted as an index, return a pointer to the inner term | |
5696 | (see address_info). Return null otherwise. */ | |
5697 | ||
5698 | static rtx * | |
5699 | get_index_term (rtx *inner) | |
1efe9e9d | 5700 | { |
2de52d09 | 5701 | /* At present, only constant scales are allowed. */ |
5702 | if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1))) | |
5703 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5704 | if (REG_P (*inner) | |
5705 | || MEM_P (*inner) | |
b61d7819 | 5706 | || GET_CODE (*inner) == SUBREG |
5707 | || GET_CODE (*inner) == SCRATCH) | |
2de52d09 | 5708 | return inner; |
5709 | return 0; | |
1efe9e9d | 5710 | } |
5711 | ||
5712 | /* Set the segment part of address INFO to LOC, given that INNER is the | |
5713 | unmutated value. */ | |
5714 | ||
5715 | static void | |
5716 | set_address_segment (struct address_info *info, rtx *loc, rtx *inner) | |
5717 | { | |
1efe9e9d | 5718 | gcc_assert (!info->segment); |
5719 | info->segment = loc; | |
5720 | info->segment_term = inner; | |
5721 | } | |
5722 | ||
5723 | /* Set the base part of address INFO to LOC, given that INNER is the | |
5724 | unmutated value. */ | |
5725 | ||
5726 | static void | |
5727 | set_address_base (struct address_info *info, rtx *loc, rtx *inner) | |
5728 | { | |
1efe9e9d | 5729 | gcc_assert (!info->base); |
5730 | info->base = loc; | |
5731 | info->base_term = inner; | |
5732 | } | |
5733 | ||
5734 | /* Set the index part of address INFO to LOC, given that INNER is the | |
5735 | unmutated value. */ | |
5736 | ||
5737 | static void | |
5738 | set_address_index (struct address_info *info, rtx *loc, rtx *inner) | |
5739 | { | |
1efe9e9d | 5740 | gcc_assert (!info->index); |
5741 | info->index = loc; | |
5742 | info->index_term = inner; | |
5743 | } | |
5744 | ||
5745 | /* Set the displacement part of address INFO to LOC, given that INNER | |
5746 | is the constant term. */ | |
5747 | ||
5748 | static void | |
5749 | set_address_disp (struct address_info *info, rtx *loc, rtx *inner) | |
5750 | { | |
1efe9e9d | 5751 | gcc_assert (!info->disp); |
5752 | info->disp = loc; | |
5753 | info->disp_term = inner; | |
5754 | } | |
5755 | ||
5756 | /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the | |
5757 | rest of INFO accordingly. */ | |
5758 | ||
5759 | static void | |
5760 | decompose_incdec_address (struct address_info *info) | |
5761 | { | |
5762 | info->autoinc_p = true; | |
5763 | ||
5764 | rtx *base = &XEXP (*info->inner, 0); | |
5765 | set_address_base (info, base, base); | |
5766 | gcc_checking_assert (info->base == info->base_term); | |
5767 | ||
5768 | /* These addresses are only valid when the size of the addressed | |
5769 | value is known. */ | |
5770 | gcc_checking_assert (info->mode != VOIDmode); | |
5771 | } | |
5772 | ||
5773 | /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest | |
5774 | of INFO accordingly. */ | |
5775 | ||
5776 | static void | |
5777 | decompose_automod_address (struct address_info *info) | |
5778 | { | |
5779 | info->autoinc_p = true; | |
5780 | ||
5781 | rtx *base = &XEXP (*info->inner, 0); | |
5782 | set_address_base (info, base, base); | |
5783 | gcc_checking_assert (info->base == info->base_term); | |
5784 | ||
5785 | rtx plus = XEXP (*info->inner, 1); | |
5786 | gcc_assert (GET_CODE (plus) == PLUS); | |
5787 | ||
5788 | info->base_term2 = &XEXP (plus, 0); | |
5789 | gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2)); | |
5790 | ||
5791 | rtx *step = &XEXP (plus, 1); | |
5792 | rtx *inner_step = strip_address_mutations (step); | |
5793 | if (CONSTANT_P (*inner_step)) | |
5794 | set_address_disp (info, step, inner_step); | |
5795 | else | |
5796 | set_address_index (info, step, inner_step); | |
5797 | } | |
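/* Example: for (pre_modify (reg sp) (plus (reg sp) (const_int -16)))
   the base term is the stack pointer and the constant step -16 becomes
   the displacement; a register step would become the index instead.  */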
5798 | ||
5799 | /* Treat *LOC as a tree of PLUS operands and store pointers to the summed | |
5800 | values in [PTR, END). Return a pointer to the end of the used array. */ | |
5801 | ||
5802 | static rtx ** | |
5803 | extract_plus_operands (rtx *loc, rtx **ptr, rtx **end) | |
5804 | { | |
5805 | rtx x = *loc; | |
5806 | if (GET_CODE (x) == PLUS) | |
5807 | { | |
5808 | ptr = extract_plus_operands (&XEXP (x, 0), ptr, end); | |
5809 | ptr = extract_plus_operands (&XEXP (x, 1), ptr, end); | |
5810 | } | |
5811 | else | |
5812 | { | |
5813 | gcc_assert (ptr != end); | |
5814 | *ptr++ = loc; | |
5815 | } | |
5816 | return ptr; | |
5817 | } | |
5818 | ||
5819 | /* Evaluate the likelihood of X being a base or index value, returning | |
5820 | positive if it is likely to be a base, negative if it is likely to be | |
5821 | an index, and 0 if we can't tell. Make the magnitude of the return | |
5822 | value reflect the amount of confidence we have in the answer. | |
5823 | ||
5824 | MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */ | |
5825 | ||
5826 | static int | |
3754d046 | 5827 | baseness (rtx x, machine_mode mode, addr_space_t as, |
1efe9e9d | 5828 | enum rtx_code outer_code, enum rtx_code index_code) |
5829 | { | |
1efe9e9d | 5830 | /* Believe *_POINTER unless the address shape requires otherwise. */ |
5831 | if (REG_P (x) && REG_POINTER (x)) | |
5832 | return 2; | |
5833 | if (MEM_P (x) && MEM_POINTER (x)) | |
5834 | return 2; | |
5835 | ||
5836 | if (REG_P (x) && HARD_REGISTER_P (x)) | |
5837 | { | |
5838 | /* X is a hard register. If it only fits one of the base | |
5839 | or index classes, choose that interpretation. */ | |
5840 | int regno = REGNO (x); | |
5841 | bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code); | |
5842 | bool index_p = REGNO_OK_FOR_INDEX_P (regno); | |
5843 | if (base_p != index_p) | |
5844 | return base_p ? 1 : -1; | |
5845 | } | |
5846 | return 0; | |
5847 | } | |
5848 | ||
5849 | /* INFO->INNER describes a normal, non-automodified address. | |
5850 | Fill in the rest of INFO accordingly. */ | |
5851 | ||
5852 | static void | |
5853 | decompose_normal_address (struct address_info *info) | |
5854 | { | |
5855 | /* Treat the address as the sum of up to four values. */ | |
5856 | rtx *ops[4]; | |
5857 | size_t n_ops = extract_plus_operands (info->inner, ops, | |
5858 | ops + ARRAY_SIZE (ops)) - ops; | |
5859 | ||
5860 | /* If there is more than one component, any base component is in a PLUS. */ | |
5861 | if (n_ops > 1) | |
5862 | info->base_outer_code = PLUS; | |
5863 | ||
2de52d09 | 5864 | /* Try to classify each sum operand now. Leave those that could be |
5865 | either a base or an index in OPS. */ | |
1efe9e9d | 5866 | rtx *inner_ops[4]; |
5867 | size_t out = 0; | |
5868 | for (size_t in = 0; in < n_ops; ++in) | |
5869 | { | |
5870 | rtx *loc = ops[in]; | |
5871 | rtx *inner = strip_address_mutations (loc); | |
5872 | if (CONSTANT_P (*inner)) | |
5873 | set_address_disp (info, loc, inner); | |
5874 | else if (GET_CODE (*inner) == UNSPEC) | |
5875 | set_address_segment (info, loc, inner); | |
5876 | else | |
5877 | { | |
2de52d09 | 5878 | /* The only other possibilities are a base or an index. */ |
5879 | rtx *base_term = get_base_term (inner); | |
5880 | rtx *index_term = get_index_term (inner); | |
5881 | gcc_assert (base_term || index_term); | |
5882 | if (!base_term) | |
5883 | set_address_index (info, loc, index_term); | |
5884 | else if (!index_term) | |
5885 | set_address_base (info, loc, base_term); | |
5886 | else | |
5887 | { | |
5888 | gcc_assert (base_term == index_term); | |
5889 | ops[out] = loc; | |
5890 | inner_ops[out] = base_term; | |
5891 | ++out; | |
5892 | } | |
1efe9e9d | 5893 | } |
5894 | } | |
5895 | ||
5896 | /* Classify the remaining OPS members as bases and indexes. */ | |
5897 | if (out == 1) | |
5898 | { | |
2de52d09 | 5899 | /* If we haven't seen a base or an index yet, assume that this is |
5900 | the base. If we were confident that another term was the base | |
5901 | or index, treat the remaining operand as the other kind. */ | |
5902 | if (!info->base) | |
1efe9e9d | 5903 | set_address_base (info, ops[0], inner_ops[0]); |
5904 | else | |
5905 | set_address_index (info, ops[0], inner_ops[0]); | |
5906 | } | |
5907 | else if (out == 2) | |
5908 | { | |
5909 | /* In the event of a tie, assume the base comes first. */ | |
5910 | if (baseness (*inner_ops[0], info->mode, info->as, PLUS, | |
5911 | GET_CODE (*ops[1])) | |
5912 | >= baseness (*inner_ops[1], info->mode, info->as, PLUS, | |
5913 | GET_CODE (*ops[0]))) | |
5914 | { | |
5915 | set_address_base (info, ops[0], inner_ops[0]); | |
5916 | set_address_index (info, ops[1], inner_ops[1]); | |
5917 | } | |
5918 | else | |
5919 | { | |
5920 | set_address_base (info, ops[1], inner_ops[1]); | |
5921 | set_address_index (info, ops[0], inner_ops[0]); | |
5922 | } | |
5923 | } | |
5924 | else | |
5925 | gcc_assert (out == 0); | |
5926 | } | |
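/* Example: for the address
       (plus (plus (reg b) (mult (reg i) (const_int 4))) (const_int 16))
   the three summands are classified as follows: (const_int 16) is the
   displacement, the MULT is the index (with (reg i) as the index term
   and scale 4), and (reg b), which on its own could be either, is taken
   as the base because no base has been seen yet.  */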
5927 | ||
5928 | /* Describe address *LOC in *INFO. MODE is the mode of the addressed value, | |
5929 | or VOIDmode if not known. AS is the address space associated with LOC. | |
5930 | OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */ | |
5931 | ||
5932 | void | |
3754d046 | 5933 | decompose_address (struct address_info *info, rtx *loc, machine_mode mode, |
1efe9e9d | 5934 | addr_space_t as, enum rtx_code outer_code) |
5935 | { | |
5936 | memset (info, 0, sizeof (*info)); | |
5937 | info->mode = mode; | |
5938 | info->as = as; | |
5939 | info->addr_outer_code = outer_code; | |
5940 | info->outer = loc; | |
5941 | info->inner = strip_address_mutations (loc, &outer_code); | |
5942 | info->base_outer_code = outer_code; | |
5943 | switch (GET_CODE (*info->inner)) | |
5944 | { | |
5945 | case PRE_DEC: | |
5946 | case PRE_INC: | |
5947 | case POST_DEC: | |
5948 | case POST_INC: | |
5949 | decompose_incdec_address (info); | |
5950 | break; | |
5951 | ||
5952 | case PRE_MODIFY: | |
5953 | case POST_MODIFY: | |
5954 | decompose_automod_address (info); | |
5955 | break; | |
5956 | ||
5957 | default: | |
5958 | decompose_normal_address (info); | |
5959 | break; | |
5960 | } | |
5961 | } | |
5962 | ||
5963 | /* Describe address operand LOC in INFO. */ | |
5964 | ||
5965 | void | |
5966 | decompose_lea_address (struct address_info *info, rtx *loc) | |
5967 | { | |
5968 | decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS); | |
5969 | } | |
5970 | ||
5971 | /* Describe the address of MEM X in INFO. */ | |
5972 | ||
5973 | void | |
5974 | decompose_mem_address (struct address_info *info, rtx x) | |
5975 | { | |
5976 | gcc_assert (MEM_P (x)); | |
5977 | decompose_address (info, &XEXP (x, 0), GET_MODE (x), | |
5978 | MEM_ADDR_SPACE (x), MEM); | |
5979 | } | |
5980 | ||
5981 | /* Update INFO after a change to the address it describes. */ | |
5982 | ||
5983 | void | |
5984 | update_address (struct address_info *info) | |
5985 | { | |
5986 | decompose_address (info, info->outer, info->mode, info->as, | |
5987 | info->addr_outer_code); | |
5988 | } | |
5989 | ||
5990 | /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is | |
5991 | more complicated than that. */ | |
5992 | ||
5993 | HOST_WIDE_INT | |
5994 | get_index_scale (const struct address_info *info) | |
5995 | { | |
5996 | rtx index = *info->index; | |
5997 | if (GET_CODE (index) == MULT | |
5998 | && CONST_INT_P (XEXP (index, 1)) | |
5999 | && info->index_term == &XEXP (index, 0)) | |
6000 | return INTVAL (XEXP (index, 1)); | |
6001 | ||
6002 | if (GET_CODE (index) == ASHIFT | |
6003 | && CONST_INT_P (XEXP (index, 1)) | |
6004 | && info->index_term == &XEXP (index, 0)) | |
6005 | return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1)); | |
6006 | ||
6007 | if (info->index == info->index_term) | |
6008 | return 1; | |
6009 | ||
6010 | return 0; | |
6011 | } | |
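/* Examples: for an index of (mult (reg i) (const_int 4)) the scale is 4,
   for (ashift (reg i) (const_int 3)) it is 1 << 3 = 8, and for a bare
   register it is 1.  */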
6012 | ||
6013 | /* Return the "index code" of INFO, in the form required by | |
6014 | ok_for_base_p_1. */ | |
6015 | ||
6016 | enum rtx_code | |
6017 | get_index_code (const struct address_info *info) | |
6018 | { | |
6019 | if (info->index) | |
6020 | return GET_CODE (*info->index); | |
6021 | ||
6022 | if (info->disp) | |
6023 | return GET_CODE (*info->disp); | |
6024 | ||
6025 | return SCRATCH; | |
6026 | } | |
53ea4c57 | 6027 | |
53ea4c57 | 6028 | /* Return true if X contains a thread-local symbol. */ |
6029 | ||
6030 | bool | |
7739c19e | 6031 | tls_referenced_p (const_rtx x) |
53ea4c57 | 6032 | { |
6033 | if (!targetm.have_tls) | |
6034 | return false; | |
6035 | ||
7739c19e | 6036 | subrtx_iterator::array_type array; |
02c7697d | 6037 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
7739c19e | 6038 | if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0) |
6039 | return true; | |
6040 | return false; | |
53ea4c57 | 6041 | } |